diff --git a/sdk/confluent/azure-resourcemanager-confluent/CHANGELOG.md b/sdk/confluent/azure-resourcemanager-confluent/CHANGELOG.md
index eb2af443e718..400fa06b5f22 100644
--- a/sdk/confluent/azure-resourcemanager-confluent/CHANGELOG.md
+++ b/sdk/confluent/azure-resourcemanager-confluent/CHANGELOG.md
@@ -1,14 +1,129 @@
 # Release History
 
-## 1.3.0-beta.1 (Unreleased)
+## 1.3.0-beta.1 (2025-04-30)
+
+- Azure Resource Manager Confluent client library for Java. This package contains Microsoft Azure SDK for Confluent Management SDK. Package tag package-2024-07. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt).
 
 ### Features Added
 
-### Breaking Changes
+* `models.ConnectorStatus` was added
+
+* `models.DataFormatType` was added
+
+* `models.Clusters` was added
+
+* `models.ConnectorResource` was added
+
+* `models.KafkaAzureBlobStorageSourceConnectorInfo` was added
+
+* `models.TopicMetadataEntity` was added
+
+* `models.ListTopicsSuccessResponse` was added
+
+* `models.ConnectorServiceTypeInfoBase` was added
+
+* `models.PartnerConnectorType` was added
+
+* `models.Topics` was added
+
+* `models.SCEnvironmentRecord$DefinitionStages` was added
+
+* `models.SCEnvironmentRecord$UpdateStages` was added
+
+* `models.TopicRecord$DefinitionStages` was added
+
+* `models.ConnectorServiceType` was added
+
+* `models.ConnectorResource$Update` was added
+
+* `models.KafkaAzureSynapseAnalyticsSinkConnectorInfo` was added
+
+* `models.StreamGovernanceConfig` was added
+
+* `models.SCClusterRecord$DefinitionStages` was added
+
+* `models.AuthType` was added
+
+* `models.AzureSynapseAnalyticsSinkConnectorServiceInfo` was added
+
+* `models.AzureCosmosDBSourceConnectorServiceInfo` was added
+
+* `models.ConnectorResource$DefinitionStages` was added
+
+* `models.Connectors` was added
+
+* `models.Package` was added
+
+* `models.SCEnvironmentRecord$Update` was added
+
+* `models.SCClusterRecord$UpdateStages` was added
+
+* `models.KafkaAzureCosmosDBSinkConnectorInfo` was added
+
+* `models.TopicsInputConfig` was added
 
-### Bugs Fixed
+* `models.PartnerInfoBase` was added
 
-### Other Changes
+* `models.AzureBlobStorageSourceConnectorServiceInfo` was added
+
+* `models.TopicsRelatedLink` was added
+
+* `models.KafkaAzureCosmosDBSourceConnectorInfo` was added
+
+* `models.SCEnvironmentRecord$Definition` was added
+
+* `models.TopicRecord` was added
+
+* `models.TopicRecord$Definition` was added
+
+* `models.ConnectorResource$Definition` was added
+
+* `models.ConnectorInfoBase` was added
+
+* `models.ConnectorResource$UpdateStages` was added
+
+* `models.AzureBlobStorageSinkConnectorServiceInfo` was added
+
+* `models.ListConnectorsSuccessResponse` was added
+
+* `models.ConnectorType` was added
+
+* `models.SCClusterRecord$Update` was added
+
+* `models.KafkaAzureBlobStorageSinkConnectorInfo` was added
+
+* `models.AzureCosmosDBSinkConnectorServiceInfo` was added
+
+* `models.Environments` was added
+
+* `models.ConnectorClass` was added
+
+* `models.SCClusterRecord$Definition` was added
+
+#### `models.SCEnvironmentRecord` was modified
+
+* `streamGovernanceConfig()` was added
+* `type()` was added
+* `update()` was added
+* `resourceGroupName()` was added
+
+#### `models.SCClusterSpecEntity` was modified
+
+* `packageProperty()` was added
+* `withPackageProperty(models.Package)` was added
+
+#### `ConfluentManager` was modified
+
+* `topics()` was added
+* `clusters()` was added
+* `environments()` was added
+* `connectors()` was added
+ +#### `models.SCClusterRecord` was modified + +* `type()` was added +* `resourceGroupName()` was added +* `update()` was added ## 1.2.0 (2024-12-19) diff --git a/sdk/confluent/azure-resourcemanager-confluent/README.md b/sdk/confluent/azure-resourcemanager-confluent/README.md index 0e0edfdc1ad2..9098f8e96fe8 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/README.md +++ b/sdk/confluent/azure-resourcemanager-confluent/README.md @@ -2,7 +2,7 @@ Azure Resource Manager Confluent client library for Java. -This package contains Microsoft Azure SDK for Confluent Management SDK. Package tag package-2024-02. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt). +This package contains Microsoft Azure SDK for Confluent Management SDK. Package tag package-2024-07. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt). ## We'd love to hear your feedback @@ -52,7 +52,7 @@ Azure subscription ID can be configured via `AZURE_SUBSCRIPTION_ID` environment Assuming the use of the `DefaultAzureCredential` credential class, the client can be authenticated using the following code: ```java -AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE); +AzureProfile profile = new AzureProfile(AzureCloud.AZURE_PUBLIC_CLOUD); TokenCredential credential = new DefaultAzureCredentialBuilder() .authorityHost(profile.getEnvironment().getActiveDirectoryEndpoint()) .build(); @@ -60,7 +60,7 @@ ConfluentManager manager = ConfluentManager .authenticate(credential, profile); ``` -The sample code assumes global Azure. Please change `AzureEnvironment.AZURE` variable if otherwise. +The sample code assumes global Azure. Please change the `AzureCloud.AZURE_PUBLIC_CLOUD` variable if otherwise. See [Authentication][authenticate] for more options. @@ -100,5 +100,3 @@ This project has adopted the [Microsoft Open Source Code of Conduct][coc]. For m [cg]: https://github.com/Azure/azure-sdk-for-java/blob/main/CONTRIBUTING.md [coc]: https://opensource.microsoft.com/codeofconduct/ [coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/ - - diff --git a/sdk/confluent/azure-resourcemanager-confluent/SAMPLE.md b/sdk/confluent/azure-resourcemanager-confluent/SAMPLE.md index f699596d5189..a29bb1c96306 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/SAMPLE.md +++ b/sdk/confluent/azure-resourcemanager-confluent/SAMPLE.md @@ -14,6 +14,23 @@ - [ListServiceAccounts](#access_listserviceaccounts) - [ListUsers](#access_listusers) +## Cluster + +- [CreateOrUpdate](#cluster_createorupdate) +- [Delete](#cluster_delete) + +## Connector + +- [CreateOrUpdate](#connector_createorupdate) +- [Delete](#connector_delete) +- [Get](#connector_get) +- [List](#connector_list) + +## Environment + +- [CreateOrUpdate](#environment_createorupdate) +- [Delete](#environment_delete) + ## MarketplaceAgreements - [Create](#marketplaceagreements_create) @@ -42,6 +59,13 @@ - [List](#organizationoperations_list) +## Topics + +- [Create](#topics_create) +- [Delete](#topics_delete) +- [Get](#topics_get) +- [List](#topics_list) + ## Validations - [ValidateOrganization](#validations_validateorganization) @@ -49,29 +73,22 @@ ### Access_CreateRoleBinding ```java -import com.azure.resourcemanager.confluent.models.AccessCreateRoleBindingRequestModel; - /** - * Samples for Access CreateRoleBinding. + * Samples for Connector Delete. 
*/ -public final class AccessCreateRoleBindingSamples { +public final class ConnectorDeleteSamples { /* - * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_CreateRoleBinding. - * json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_DeleteConnectorByName.json */ /** - * Sample code: Access_CreateRoleBinding. + * Sample code: Connector_Delete. * * @param manager Entry point to ConfluentManager. */ - public static void accessCreateRoleBinding(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .createRoleBindingWithResponse("myResourceGroup", "myOrganization", - new AccessCreateRoleBindingRequestModel().withPrincipal("User:u-111aaa") - .withRoleName("CloudClusterAdmin") - .withCrnPattern( - "crn://confluent.cloud/organization=1111aaaa-11aa-11aa-11aa-111111aaaaaa/environment=env-aaa1111/cloud-cluster=lkc-1111aaa"), + public static void connectorDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .delete("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "connector-1", com.azure.core.util.Context.NONE); } } @@ -80,59 +97,36 @@ public final class AccessCreateRoleBindingSamples { ### Access_DeleteRoleBinding ```java -/** - * Samples for Access DeleteRoleBinding. - */ -public final class AccessDeleteRoleBindingSamples { - /* - * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_DeleteRoleBinding. - * json - */ - /** - * Sample code: Access_DeleteRoleBinding. - * - * @param manager Entry point to ConfluentManager. - */ - public static void accessDeleteRoleBinding(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .deleteRoleBindingWithResponse("myResourceGroup", "myOrganization", "dlz-f3a90de", - com.azure.core.util.Context.NONE); - } -} -``` - -### Access_InviteUser - -```java -import com.azure.resourcemanager.confluent.models.AccessInviteUserAccountModel; -import com.azure.resourcemanager.confluent.models.AccessInvitedUserDetails; +import com.azure.resourcemanager.confluent.models.Package; +import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity; +import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity; /** - * Samples for Access InviteUser. + * Samples for Cluster CreateOrUpdate. */ -public final class AccessInviteUserSamples { +public final class ClusterCreateOrUpdateSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_InviteUser.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Cluster_Create.json */ /** - * Sample code: Access_InviteUser. + * Sample code: Cluster_CreateOrUpdate. * * @param manager Entry point to ConfluentManager. 
*/ - public static void accessInviteUser(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .inviteUserWithResponse("myResourceGroup", "myOrganization", - new AccessInviteUserAccountModel() - .withInvitedUserDetails(new AccessInvitedUserDetails().withInvitedEmail("user2@onmicrosoft.com") - .withAuthType("AUTH_TYPE_SSO")), - com.azure.core.util.Context.NONE); + public static void clusterCreateOrUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.clusters() + .define("cluster-1") + .withExistingEnvironment("myResourceGroup", "myOrganization", "env-1") + .withSpec(new SCClusterSpecEntity().withPackageProperty(Package.ESSENTIALS) + .withRegion("us-east4") + .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("env-1"))) + .create(); } } ``` -### Access_ListClusters +### Access_InviteUser ```java import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; @@ -140,22 +134,24 @@ import java.util.HashMap; import java.util.Map; /** - * Samples for Access ListClusters. + * Samples for Access ListRoleBindingNameList. */ -public final class AccessListClustersSamples { +public final class AccessListRoleBindingNameListSamples { /* - * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_ClusterList.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Access_RoleBindingNameList.json */ /** - * Sample code: Access_ClusterList. + * Sample code: Access_RoleBindingNameList. * * @param manager Entry point to ConfluentManager. */ - public static void accessClusterList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + public static void accessRoleBindingNameList(com.azure.resourcemanager.confluent.ConfluentManager manager) { manager.access() - .listClustersWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() - .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), + .listRoleBindingNameListWithResponse("myResourceGroup", "myOrganization", + new ListAccessRequestModel().withSearchFilters(mapOf("crn_pattern", + "crn://confluent.cloud/organization=1aa7de07-298e-479c-8f2f-16ac91fd8e76", "namespace", + "public,dataplane,networking,identity,datagovernance,connect,streamcatalog,pipelines,ksql")), com.azure.core.util.Context.NONE); } @@ -173,7 +169,7 @@ public final class AccessListClustersSamples { } ``` -### Access_ListEnvironments +### Access_ListClusters ```java import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; @@ -181,23 +177,24 @@ import java.util.HashMap; import java.util.Map; /** - * Samples for Access ListEnvironments. + * Samples for Organization ListRegions. */ -public final class AccessListEnvironmentsSamples { +public final class OrganizationListRegionsSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_EnvironmentList. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_ListRegions. * json */ /** - * Sample code: Access_EnvironmentList. + * Sample code: Organization_ListRegions. * * @param manager Entry point to ConfluentManager. 
*/ - public static void accessEnvironmentList(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .listEnvironmentsWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() - .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), + public static void organizationListRegions(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations() + .listRegionsWithResponse("myResourceGroup", "myOrganization", + new ListAccessRequestModel() + .withSearchFilters(mapOf("cloud", "azure", "packages", "ADVANCED,ESSENTIALS", "region", "eastus")), com.azure.core.util.Context.NONE); } @@ -215,158 +212,117 @@ public final class AccessListEnvironmentsSamples { } ``` -### Access_ListInvitations +### Access_ListEnvironments ```java -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.util.HashMap; -import java.util.Map; - /** - * Samples for Access ListInvitations. + * Samples for Topics Get. */ -public final class AccessListInvitationsSamples { +public final class TopicsGetSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_InvitationsList. - * json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Topics_Get.json */ /** - * Sample code: Access_InvitationsList. + * Sample code: Topics_Get. * * @param manager Entry point to ConfluentManager. */ - public static void accessInvitationsList(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .listInvitationsWithResponse("myResourceGroup", "myOrganization", - new ListAccessRequestModel().withSearchFilters( - mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder", "status", "INVITE_STATUS_SENT")), + public static void topicsGet(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .getWithResponse("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "topic-1", com.azure.core.util.Context.NONE); } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } } ``` -### Access_ListRoleBindingNameList +### Access_ListInvitations ```java -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.util.HashMap; -import java.util.Map; - /** - * Samples for Access ListRoleBindingNameList. + * Samples for OrganizationOperations List. */ -public final class AccessListRoleBindingNameListSamples { +public final class OrganizationOperationsListSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Access_RoleBindingNameList.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * OrganizationOperations_List.json */ /** - * Sample code: Access_RoleBindingNameList. + * Sample code: OrganizationOperations_List. * * @param manager Entry point to ConfluentManager. 
*/ - public static void accessRoleBindingNameList(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .listRoleBindingNameListWithResponse("myResourceGroup", "myOrganization", - new ListAccessRequestModel().withSearchFilters(mapOf("crn_pattern", - "crn://confluent.cloud/organization=1aa7de07-298e-479c-8f2f-16ac91fd8e76", "namespace", - "public,dataplane,networking,identity,datagovernance,connect,streamcatalog,pipelines,ksql")), - com.azure.core.util.Context.NONE); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + public static void organizationOperationsList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizationOperations().list(com.azure.core.util.Context.NONE); } } ``` -### Access_ListRoleBindings +### Access_ListRoleBindingNameList ```java -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.util.HashMap; -import java.util.Map; - /** - * Samples for Access ListRoleBindings. + * Samples for Topics List. */ -public final class AccessListRoleBindingsSamples { +public final class TopicsListSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_RoleBindingList. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_TopicList. * json */ /** - * Sample code: Access_RoleBindingList. + * Sample code: Organization_ListTopics. * * @param manager Entry point to ConfluentManager. */ - public static void accessRoleBindingList(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .listRoleBindingsWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() - .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), + public static void organizationListTopics(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .list("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", 10, null, com.azure.core.util.Context.NONE); } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } } ``` -### Access_ListServiceAccounts +### Access_ListRoleBindings ```java -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; +import com.azure.resourcemanager.confluent.fluent.models.OrganizationResourceInner; +import com.azure.resourcemanager.confluent.models.OfferDetail; +import com.azure.resourcemanager.confluent.models.UserDetail; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; /** - * Samples for Access ListServiceAccounts. + * Samples for Validations ValidateOrganization. 
*/ -public final class AccessListServiceAccountsSamples { +public final class ValidationsValidateOrganizationSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Access_ServiceAccountsList.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Validations_ValidateOrganizations.json */ /** - * Sample code: Access_ServiceAccountsList. + * Sample code: Validations_ValidateOrganizations. * * @param manager Entry point to ConfluentManager. */ - public static void accessServiceAccountsList(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .listServiceAccountsWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() - .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), + public static void validationsValidateOrganizations(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.validations() + .validateOrganizationWithResponse("myResourceGroup", "myOrganization", + new OrganizationResourceInner().withLocation("West US") + .withTags(mapOf("Environment", "Dev")) + .withOfferDetail(new OfferDetail().withPublisherId("string") + .withId("string") + .withPlanId("string") + .withPlanName("string") + .withTermUnit("string") + .withPrivateOfferId("string") + .withPrivateOfferIds(Arrays.asList("string"))) + .withUserDetail(new UserDetail().withFirstName("string") + .withLastName("string") + .withEmailAddress("abc@microsoft.com") + .withUserPrincipalName("abc@microsoft.com") + .withAadEmail("abc@microsoft.com")), com.azure.core.util.Context.NONE); } @@ -384,70 +340,53 @@ public final class AccessListServiceAccountsSamples { } ``` -### Access_ListUsers +### Access_ListServiceAccounts ```java -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.util.HashMap; -import java.util.Map; /** - * Samples for Access ListUsers. + * Samples for MarketplaceAgreements Create. */ -public final class AccessListUsersSamples { +public final class MarketplaceAgreementsCreateSamples { /* - * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_UsersList.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * MarketplaceAgreements_Create.json */ /** - * Sample code: Access_UsersList. + * Sample code: MarketplaceAgreements_Create. * * @param manager Entry point to ConfluentManager. */ - public static void accessUsersList(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.access() - .listUsersWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel().withSearchFilters( - mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), com.azure.core.util.Context.NONE); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; + public static void marketplaceAgreementsCreate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.marketplaceAgreements().createWithResponse(null, com.azure.core.util.Context.NONE); } } ``` -### MarketplaceAgreements_Create +### Access_ListUsers ```java - /** - * Samples for MarketplaceAgreements Create. + * Samples for Cluster Delete. */ -public final class MarketplaceAgreementsCreateSamples { +public final class ClusterDeleteSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * MarketplaceAgreements_Create.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Cluster_Delete.json */ /** - * Sample code: MarketplaceAgreements_Create. + * Sample code: Cluster_Delete. * * @param manager Entry point to ConfluentManager. */ - public static void marketplaceAgreementsCreate(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.marketplaceAgreements().createWithResponse(null, com.azure.core.util.Context.NONE); + public static void clusterDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.clusters() + .delete("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", com.azure.core.util.Context.NONE); } } ``` -### MarketplaceAgreements_List +### Cluster_CreateOrUpdate ```java /** @@ -455,7 +394,7 @@ public final class MarketplaceAgreementsCreateSamples { */ public final class MarketplaceAgreementsListSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * MarketplaceAgreements_List.json */ /** @@ -469,49 +408,62 @@ public final class MarketplaceAgreementsListSamples { } ``` -### Organization_Create +### Cluster_Delete ```java -import com.azure.resourcemanager.confluent.models.LinkOrganization; -import com.azure.resourcemanager.confluent.models.OfferDetail; -import com.azure.resourcemanager.confluent.models.UserDetail; -import java.util.Arrays; +import com.azure.resourcemanager.confluent.models.AccessCreateRoleBindingRequestModel; + +/** + * Samples for Access CreateRoleBinding. + */ +public final class AccessCreateRoleBindingSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_CreateRoleBinding. + * json + */ + /** + * Sample code: Access_CreateRoleBinding. + * + * @param manager Entry point to ConfluentManager. + */ + public static void accessCreateRoleBinding(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .createRoleBindingWithResponse("myResourceGroup", "myOrganization", + new AccessCreateRoleBindingRequestModel().withPrincipal("User:u-111aaa") + .withRoleName("CloudClusterAdmin") + .withCrnPattern( + "crn://confluent.cloud/organization=1111aaaa-11aa-11aa-11aa-111111aaaaaa/environment=env-aaa1111/cloud-cluster=lkc-1111aaa"), + com.azure.core.util.Context.NONE); + } +} +``` + +### Connector_CreateOrUpdate + +```java +import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; import java.util.HashMap; import java.util.Map; /** - * Samples for Organization Create. 
+ * Samples for Access ListServiceAccounts. */ -public final class OrganizationCreateSamples { +public final class AccessListServiceAccountsSamples { /* - * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Create.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Access_ServiceAccountsList.json */ /** - * Sample code: Organization_Create. + * Sample code: Access_ServiceAccountsList. * * @param manager Entry point to ConfluentManager. */ - public static void organizationCreate(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations() - .define("myOrganization") - .withRegion("West US") - .withExistingResourceGroup("myResourceGroup") - .withOfferDetail(new OfferDetail().withPublisherId("string") - .withId("string") - .withPlanId("string") - .withPlanName("string") - .withTermUnit("string") - .withPrivateOfferId("string") - .withPrivateOfferIds(Arrays.asList("string"))) - .withUserDetail(new UserDetail().withFirstName("string") - .withLastName("string") - .withEmailAddress("contoso@microsoft.com") - .withUserPrincipalName("contoso@microsoft.com") - .withAadEmail("contoso@microsoft.com")) - .withTags(mapOf("Environment", "Dev")) - .withLinkOrganization(new LinkOrganization().withToken("fakeTokenPlaceholder")) - .create(); + public static void accessServiceAccountsList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .listServiceAccountsWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() + .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), + com.azure.core.util.Context.NONE); } // Use "Map.of" if available @@ -528,292 +480,482 @@ public final class OrganizationCreateSamples { } ``` -### Organization_CreateApiKey +### Connector_Delete ```java -import com.azure.resourcemanager.confluent.models.CreateApiKeyModel; - /** - * Samples for Organization CreateApiKey. + * Samples for Organization ListClusters. */ -public final class OrganizationCreateApiKeySamples { +public final class OrganizationListClustersSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_CreateClusterAPIKey.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_ClusterList. + * json */ /** - * Sample code: Organization_CreateAPIKey. + * Sample code: Organization_ListClusters. * * @param manager Entry point to ConfluentManager. */ - public static void organizationCreateAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { + public static void organizationListClusters(com.azure.resourcemanager.confluent.ConfluentManager manager) { manager.organizations() - .createApiKeyWithResponse("myResourceGroup", "myOrganization", "env-12132", "clusterId-123", - new CreateApiKeyModel().withName("CI kafka access key") - .withDescription("This API key provides kafka access to cluster x"), - com.azure.core.util.Context.NONE); + .listClusters("myResourceGroup", "myOrganization", "env-12132", 10, null, com.azure.core.util.Context.NONE); } } ``` -### Organization_Delete +### Connector_Get ```java /** - * Samples for Organization Delete. + * Samples for Organization GetSchemaRegistryClusterById. 
*/ -public final class OrganizationDeleteSamples { +public final class OrganizationGetSchemaRegistryClusterByIdSamples { /* - * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Delete.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_GetSchemaRegistryClusterById.json */ /** - * Sample code: Confluent_Delete. + * Sample code: Organization_GetSchemaRegistryClusterById. * * @param manager Entry point to ConfluentManager. */ - public static void confluentDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations().delete("myResourceGroup", "myOrganization", com.azure.core.util.Context.NONE); + public static void + organizationGetSchemaRegistryClusterById(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations() + .getSchemaRegistryClusterByIdWithResponse("myResourceGroup", "myOrganization", "env-stgcczjp2j3", + "lsrc-stgczkq22z", com.azure.core.util.Context.NONE); } } ``` -### Organization_DeleteClusterApiKey +### Connector_List ```java +import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; +import java.util.HashMap; +import java.util.Map; + /** - * Samples for Organization DeleteClusterApiKey. + * Samples for Access ListUsers. */ -public final class OrganizationDeleteClusterApiKeySamples { +public final class AccessListUsersSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_DeleteClusterAPIKey.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_UsersList.json */ /** - * Sample code: Organization_DeleteClusterAPIKey. + * Sample code: Access_UsersList. * * @param manager Entry point to ConfluentManager. */ - public static void organizationDeleteClusterAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations() - .deleteClusterApiKeyWithResponse("myResourceGroup", "myOrganization", "ZFZ6SZZZWGYBEIFB", - com.azure.core.util.Context.NONE); + public static void accessUsersList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .listUsersWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel().withSearchFilters( + mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), com.azure.core.util.Context.NONE); + } + + // Use "Map.of" if available + @SuppressWarnings("unchecked") + private static Map mapOf(Object... inputs) { + Map map = new HashMap<>(); + for (int i = 0; i < inputs.length; i += 2) { + String key = (String) inputs[i]; + T value = (T) inputs[i + 1]; + map.put(key, value); + } + return map; } } ``` -### Organization_GetByResourceGroup +### Environment_CreateOrUpdate ```java /** - * Samples for Organization GetByResourceGroup. + * Samples for Access DeleteRoleBinding. */ -public final class OrganizationGetByResourceGroupSamples { +public final class AccessDeleteRoleBindingSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Get.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_DeleteRoleBinding. + * json */ /** - * Sample code: Organization_Get. + * Sample code: Access_DeleteRoleBinding. * * @param manager Entry point to ConfluentManager. 
*/ - public static void organizationGet(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations() - .getByResourceGroupWithResponse("myResourceGroup", "myOrganization", com.azure.core.util.Context.NONE); + public static void accessDeleteRoleBinding(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .deleteRoleBindingWithResponse("myResourceGroup", "myOrganization", "dlz-f3a90de", + com.azure.core.util.Context.NONE); } } ``` -### Organization_GetClusterApiKey +### Environment_Delete ```java +import com.azure.resourcemanager.confluent.models.TopicsInputConfig; +import java.util.Arrays; + /** - * Samples for Organization GetClusterApiKey. + * Samples for Topics Create. */ -public final class OrganizationGetClusterApiKeySamples { +public final class TopicsCreateSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_GetClusterAPIKey.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Topics_Create.json */ /** - * Sample code: Organization_GetClusterAPIKey. + * Sample code: Topics_Create. * * @param manager Entry point to ConfluentManager. */ - public static void organizationGetClusterAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations() - .getClusterApiKeyWithResponse("myResourceGroup", "myOrganization", "apiKeyId-123", - com.azure.core.util.Context.NONE); + public static void topicsCreate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .define("topic-1") + .withExistingCluster("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de") + .withInputConfigs(Arrays.asList(new TopicsInputConfig().withName("cleanup.policy").withValue("compact"), + new TopicsInputConfig().withName("retention.ms").withValue("86400000"))) + .withPartitionsCount("1") + .withReplicationFactor("3") + .create(); } } ``` -### Organization_GetClusterById +### MarketplaceAgreements_Create ```java /** - * Samples for Organization GetClusterById. + * Samples for Connector List. */ -public final class OrganizationGetClusterByIdSamples { +public final class ConnectorListSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_GetClusterById.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_ConnectorList.json */ /** - * Sample code: Organization_GetClusterById. + * Sample code: Connector_List. * * @param manager Entry point to ConfluentManager. */ - public static void organizationGetClusterById(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations() - .getClusterByIdWithResponse("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", + public static void connectorList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .list("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", 10, null, com.azure.core.util.Context.NONE); } } ``` -### Organization_GetEnvironmentById +### MarketplaceAgreements_List ```java /** - * Samples for Organization GetEnvironmentById. + * Samples for Organization ListEnvironments. 
*/ -public final class OrganizationGetEnvironmentByIdSamples { +public final class OrganizationListEnvironmentsSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_GetEnvironmentById.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_EnvironmentList.json */ /** - * Sample code: Organization_GetEnvironmentById. + * Sample code: Organization_ListEnvironments. * * @param manager Entry point to ConfluentManager. */ - public static void organizationGetEnvironmentById(com.azure.resourcemanager.confluent.ConfluentManager manager) { + public static void organizationListEnvironments(com.azure.resourcemanager.confluent.ConfluentManager manager) { manager.organizations() - .getEnvironmentByIdWithResponse("myResourceGroup", "myOrganization", "dlz-f3a90de", - com.azure.core.util.Context.NONE); + .listEnvironments("myResourceGroup", "myOrganization", 10, null, com.azure.core.util.Context.NONE); } } ``` -### Organization_GetSchemaRegistryClusterById +### Organization_Create ```java +import com.azure.resourcemanager.confluent.fluent.models.OrganizationResourceInner; +import com.azure.resourcemanager.confluent.models.OfferDetail; +import com.azure.resourcemanager.confluent.models.UserDetail; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + /** - * Samples for Organization GetSchemaRegistryClusterById. + * Samples for Validations ValidateOrganizationV2. */ -public final class OrganizationGetSchemaRegistryClusterByIdSamples { +public final class ValidationsValidateOrganizationV2Samples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_GetSchemaRegistryClusterById.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Validations_ValidateOrganizationsV2.json */ /** - * Sample code: Organization_GetSchemaRegistryClusterById. + * Sample code: Validations_ValidateOrganizations. * * @param manager Entry point to ConfluentManager. */ - public static void - organizationGetSchemaRegistryClusterById(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations() - .getSchemaRegistryClusterByIdWithResponse("myResourceGroup", "myOrganization", "env-stgcczjp2j3", - "lsrc-stgczkq22z", com.azure.core.util.Context.NONE); + public static void validationsValidateOrganizations(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.validations() + .validateOrganizationV2WithResponse("myResourceGroup", "myOrganization", + new OrganizationResourceInner().withLocation("West US") + .withTags(mapOf("Environment", "Dev")) + .withOfferDetail(new OfferDetail().withPublisherId("string") + .withId("string") + .withPlanId("string") + .withPlanName("string") + .withTermUnit("string") + .withPrivateOfferId("string") + .withPrivateOfferIds(Arrays.asList("string"))) + .withUserDetail(new UserDetail().withFirstName("string") + .withLastName("string") + .withEmailAddress("abc@microsoft.com") + .withUserPrincipalName("abc@microsoft.com") + .withAadEmail("abc@microsoft.com")), + com.azure.core.util.Context.NONE); + } + + // Use "Map.of" if available + @SuppressWarnings("unchecked") + private static Map mapOf(Object... 
inputs) { + Map map = new HashMap<>(); + for (int i = 0; i < inputs.length; i += 2) { + String key = (String) inputs[i]; + T value = (T) inputs[i + 1]; + map.put(key, value); + } + return map; } } ``` -### Organization_List +### Organization_CreateApiKey ```java +import com.azure.resourcemanager.confluent.models.OrganizationResource; +import java.util.HashMap; +import java.util.Map; + /** - * Samples for Organization List. + * Samples for Organization Update. */ -public final class OrganizationListSamples { +public final class OrganizationUpdateSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_ListBySubscription.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Update.json */ /** - * Sample code: Organization_ListBySubscription. + * Sample code: Confluent_Update. * * @param manager Entry point to ConfluentManager. */ - public static void organizationListBySubscription(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations().list(com.azure.core.util.Context.NONE); + public static void confluentUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + OrganizationResource resource = manager.organizations() + .getByResourceGroupWithResponse("myResourceGroup", "myOrganization", com.azure.core.util.Context.NONE) + .getValue(); + resource.update().withTags(mapOf("client", "dev-client", "env", "dev")).apply(); + } + + // Use "Map.of" if available + @SuppressWarnings("unchecked") + private static Map mapOf(Object... inputs) { + Map map = new HashMap<>(); + for (int i = 0; i < inputs.length; i += 2) { + String key = (String) inputs[i]; + T value = (T) inputs[i + 1]; + map.put(key, value); + } + return map; } } ``` -### Organization_ListByResourceGroup +### Organization_Delete ```java +import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; +import java.util.HashMap; +import java.util.Map; + /** - * Samples for Organization ListByResourceGroup. + * Samples for Access ListClusters. */ -public final class OrganizationListByResourceGroupSamples { +public final class AccessListClustersSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_ListByResourceGroup.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_ClusterList.json */ /** - * Sample code: Organization_ListByResourceGroup. + * Sample code: Access_ClusterList. * * @param manager Entry point to ConfluentManager. */ - public static void organizationListByResourceGroup(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations().listByResourceGroup("myResourceGroup", com.azure.core.util.Context.NONE); + public static void accessClusterList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .listClustersWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() + .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), + com.azure.core.util.Context.NONE); + } + + // Use "Map.of" if available + @SuppressWarnings("unchecked") + private static Map mapOf(Object... 
inputs) { + Map map = new HashMap<>(); + for (int i = 0; i < inputs.length; i += 2) { + String key = (String) inputs[i]; + T value = (T) inputs[i + 1]; + map.put(key, value); + } + return map; } } ``` -### Organization_ListClusters +### Organization_DeleteClusterApiKey ```java /** - * Samples for Organization ListClusters. + * Samples for Organization GetClusterById. */ -public final class OrganizationListClustersSamples { +public final class OrganizationGetClusterByIdSamples { /* - * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_ClusterList. - * json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_GetClusterById.json */ /** - * Sample code: Organization_ListClusters. + * Sample code: Organization_GetClusterById. * * @param manager Entry point to ConfluentManager. */ - public static void organizationListClusters(com.azure.resourcemanager.confluent.ConfluentManager manager) { + public static void organizationGetClusterById(com.azure.resourcemanager.confluent.ConfluentManager manager) { manager.organizations() - .listClusters("myResourceGroup", "myOrganization", "env-12132", 10, null, com.azure.core.util.Context.NONE); + .getClusterByIdWithResponse("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", + com.azure.core.util.Context.NONE); } } ``` -### Organization_ListEnvironments +### Organization_GetByResourceGroup ```java +import com.azure.resourcemanager.confluent.models.CreateApiKeyModel; + /** - * Samples for Organization ListEnvironments. + * Samples for Organization CreateApiKey. */ -public final class OrganizationListEnvironmentsSamples { +public final class OrganizationCreateApiKeySamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_EnvironmentList.json + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_CreateClusterAPIKey.json */ /** - * Sample code: Organization_ListEnvironments. + * Sample code: Organization_CreateClusterAPIKey. * * @param manager Entry point to ConfluentManager. */ - public static void organizationListEnvironments(com.azure.resourcemanager.confluent.ConfluentManager manager) { + public static void organizationCreateClusterAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { manager.organizations() - .listEnvironments("myResourceGroup", "myOrganization", 10, null, com.azure.core.util.Context.NONE); + .createApiKeyWithResponse("myResourceGroup", "myOrganization", "env-12132", "clusterId-123", + new CreateApiKeyModel().withName("CI kafka access key") + .withDescription("This API key provides kafka access to cluster x"), + com.azure.core.util.Context.NONE); } } ``` -### Organization_ListRegions +### Organization_GetClusterApiKey + +```java +/** + * Samples for Organization Delete. + */ +public final class OrganizationDeleteSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Delete.json + */ + /** + * Sample code: Confluent_Delete. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void confluentDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations().delete("myResourceGroup", "myOrganization", com.azure.core.util.Context.NONE); + } +} +``` + +### Organization_GetClusterById + +```java +/** + * Samples for Organization List. + */ +public final class OrganizationListSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_ListBySubscription.json + */ + /** + * Sample code: Organization_ListBySubscription. + * + * @param manager Entry point to ConfluentManager. + */ + public static void organizationListBySubscription(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations().list(com.azure.core.util.Context.NONE); + } +} +``` + +### Organization_GetEnvironmentById + +```java +/** + * Samples for Connector Get. + */ +public final class ConnectorGetSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_GetConnectorByName.json + */ + /** + * Sample code: Connector_Get. + * + * @param manager Entry point to ConfluentManager. + */ + public static void connectorGet(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .getWithResponse("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "connector-1", + com.azure.core.util.Context.NONE); + } +} +``` + +### Organization_GetSchemaRegistryClusterById + +```java +/** + * Samples for Environment Delete. + */ +public final class EnvironmentDeleteSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Environment_Delete.json + */ + /** + * Sample code: Environment_Delete. + * + * @param manager Entry point to ConfluentManager. + */ + public static void environmentDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.environments() + .delete("myResourceGroup", "myOrganization", "env-12132", com.azure.core.util.Context.NONE); + } +} +``` + +### Organization_List ```java import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; @@ -821,24 +963,23 @@ import java.util.HashMap; import java.util.Map; /** - * Samples for Organization ListRegions. + * Samples for Access ListEnvironments. */ -public final class OrganizationListRegionsSamples { +public final class AccessListEnvironmentsSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_ListRegions. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_EnvironmentList. * json */ /** - * Sample code: Organization_ListRegions. + * Sample code: Access_EnvironmentList. * * @param manager Entry point to ConfluentManager. 
*/ - public static void organizationListRegions(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizations() - .listRegionsWithResponse("myResourceGroup", "myOrganization", - new ListAccessRequestModel() - .withSearchFilters(mapOf("cloud", "azure", "packages", "ADVANCED,ESSENTIALS", "region", "eastus")), + public static void accessEnvironmentList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .listEnvironmentsWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() + .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), com.azure.core.util.Context.NONE); } @@ -856,26 +997,152 @@ public final class OrganizationListRegionsSamples { } ``` -### Organization_ListSchemaRegistryClusters +### Organization_ListByResourceGroup ```java /** - * Samples for Organization ListSchemaRegistryClusters. + * Samples for Topics Delete. */ -public final class OrganizationListSchemaRegistryClustersSamples { +public final class TopicsDeleteSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Organization_ListSchemaRegistryClusters.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Topics_Delete.json */ /** - * Sample code: Organization_ListSchemaRegistryClusters. + * Sample code: Topics_Delete. * * @param manager Entry point to ConfluentManager. */ - public static void - organizationListSchemaRegistryClusters(com.azure.resourcemanager.confluent.ConfluentManager manager) { + public static void topicsDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .delete("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "topic-1", + com.azure.core.util.Context.NONE); + } +} +``` + +### Organization_ListClusters + +```java +import com.azure.resourcemanager.confluent.models.AuthType; +import com.azure.resourcemanager.confluent.models.AzureBlobStorageSinkConnectorServiceInfo; +import com.azure.resourcemanager.confluent.models.ConnectorClass; +import com.azure.resourcemanager.confluent.models.ConnectorInfoBase; +import com.azure.resourcemanager.confluent.models.ConnectorType; +import com.azure.resourcemanager.confluent.models.DataFormatType; +import com.azure.resourcemanager.confluent.models.KafkaAzureBlobStorageSinkConnectorInfo; +import java.util.Arrays; + +/** + * Samples for Connector CreateOrUpdate. + */ +public final class ConnectorCreateOrUpdateSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_CreateConnectorByName.json + */ + /** + * Sample code: Connector_CreateOrUpdate. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void connectorCreateOrUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .define("connector-1") + .withExistingCluster("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de") + .withConnectorBasicInfo(new ConnectorInfoBase().withConnectorType(ConnectorType.SINK) + .withConnectorClass(ConnectorClass.fromString("AZUREBLOBSTORAGESINK")) + .withConnectorName("connector-1")) + .withConnectorServiceTypeInfo( + new AzureBlobStorageSinkConnectorServiceInfo().withStorageAccountName("stcfaccount-1") + .withStorageAccountKey("fakeTokenPlaceholder") + .withStorageContainerName("continer-1")) + .withPartnerConnectorInfo(new KafkaAzureBlobStorageSinkConnectorInfo().withAuthType(AuthType.KAFKA_API_KEY) + .withInputFormat(DataFormatType.JSON) + .withOutputFormat(DataFormatType.JSON) + .withApiKey("fakeTokenPlaceholder") + .withApiSecret("fakeTokenPlaceholder") + .withTopics(Arrays.asList("topic-1")) + .withTopicsDir("topicsDir") + .withFlushSize("1000") + .withMaxTasks("2") + .withTimeInterval("DAILY")) + .create(); + } +} +``` + +### Organization_ListEnvironments + +```java +/** + * Samples for Organization ListByResourceGroup. + */ +public final class OrganizationListByResourceGroupSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_ListByResourceGroup.json + */ + /** + * Sample code: Organization_ListByResourceGroup. + * + * @param manager Entry point to ConfluentManager. + */ + public static void organizationListByResourceGroup(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations().listByResourceGroup("myResourceGroup", com.azure.core.util.Context.NONE); + } +} +``` + +### Organization_ListRegions + +```java +/** + * Samples for Organization DeleteClusterApiKey. + */ +public final class OrganizationDeleteClusterApiKeySamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_DeleteClusterAPIKey.json + */ + /** + * Sample code: Organization_DeleteClusterAPIKey. + * + * @param manager Entry point to ConfluentManager. + */ + public static void organizationDeleteClusterAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { manager.organizations() - .listSchemaRegistryClusters("myResourceGroup", "myOrganization", "env-stgcczjp2j3", null, null, + .deleteClusterApiKeyWithResponse("myResourceGroup", "myOrganization", "ZFZ6SZZZWGYBEIFB", + com.azure.core.util.Context.NONE); + } +} +``` + +### Organization_ListSchemaRegistryClusters + +```java +import com.azure.resourcemanager.confluent.models.AccessInviteUserAccountModel; +import com.azure.resourcemanager.confluent.models.AccessInvitedUserDetails; + +/** + * Samples for Access InviteUser. + */ +public final class AccessInviteUserSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_InviteUser.json + */ + /** + * Sample code: Access_InviteUser. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void accessInviteUser(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .inviteUserWithResponse("myResourceGroup", "myOrganization", + new AccessInviteUserAccountModel() + .withInvitedUserDetails(new AccessInvitedUserDetails().withInvitedEmail("user2@onmicrosoft.com") + .withAuthType("AUTH_TYPE_SSO")), com.azure.core.util.Context.NONE); } } @@ -884,28 +1151,30 @@ public final class OrganizationListSchemaRegistryClustersSamples { ### Organization_Update ```java -import com.azure.resourcemanager.confluent.models.OrganizationResource; +import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; import java.util.HashMap; import java.util.Map; /** - * Samples for Organization Update. + * Samples for Access ListInvitations. */ -public final class OrganizationUpdateSamples { +public final class AccessListInvitationsSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Update.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_InvitationsList. + * json */ /** - * Sample code: Confluent_Update. + * Sample code: Access_InvitationsList. * * @param manager Entry point to ConfluentManager. */ - public static void confluentUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { - OrganizationResource resource = manager.organizations() - .getByResourceGroupWithResponse("myResourceGroup", "myOrganization", com.azure.core.util.Context.NONE) - .getValue(); - resource.update().withTags(mapOf("client", "dev-client", "env", "dev")).apply(); + public static void accessInvitationsList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .listInvitationsWithResponse("myResourceGroup", "myOrganization", + new ListAccessRequestModel().withSearchFilters( + mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder", "status", "INVITE_STATUS_SENT")), + com.azure.core.util.Context.NONE); } // Use "Map.of" if available @@ -925,29 +1194,36 @@ public final class OrganizationUpdateSamples { ### OrganizationOperations_List ```java +import com.azure.resourcemanager.confluent.models.Package; +import com.azure.resourcemanager.confluent.models.StreamGovernanceConfig; + /** - * Samples for OrganizationOperations List. + * Samples for Environment CreateOrUpdate. */ -public final class OrganizationOperationsListSamples { +public final class EnvironmentCreateOrUpdateSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * OrganizationOperations_List.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Environment_Create.json */ /** - * Sample code: OrganizationOperations_List. + * Sample code: Environment_CreateOrUpdate. * * @param manager Entry point to ConfluentManager. 
*/ - public static void organizationOperationsList(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.organizationOperations().list(com.azure.core.util.Context.NONE); + public static void environmentCreateOrUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.environments() + .define("env-1") + .withExistingOrganization("myResourceGroup", "myOrganization") + .withStreamGovernanceConfig(new StreamGovernanceConfig().withPackageProperty(Package.ESSENTIALS)) + .create(); } } ``` -### Validations_ValidateOrganization +### Topics_Create ```java -import com.azure.resourcemanager.confluent.fluent.models.OrganizationResourceInner; +import com.azure.resourcemanager.confluent.models.LinkOrganization; import com.azure.resourcemanager.confluent.models.OfferDetail; import com.azure.resourcemanager.confluent.models.UserDetail; import java.util.Arrays; @@ -955,36 +1231,38 @@ import java.util.HashMap; import java.util.Map; /** - * Samples for Validations ValidateOrganization. + * Samples for Organization Create. */ -public final class ValidationsValidateOrganizationSamples { +public final class OrganizationCreateSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Validations_ValidateOrganizations.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Create.json */ /** - * Sample code: Validations_ValidateOrganizations. + * Sample code: Organization_Create. * * @param manager Entry point to ConfluentManager. */ - public static void validationsValidateOrganizations(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.validations() - .validateOrganizationWithResponse("myResourceGroup", "myOrganization", - new OrganizationResourceInner().withLocation("West US") - .withTags(mapOf("Environment", "Dev")) - .withOfferDetail(new OfferDetail().withPublisherId("string") - .withId("string") - .withPlanId("string") - .withPlanName("string") - .withTermUnit("string") - .withPrivateOfferId("string") - .withPrivateOfferIds(Arrays.asList("string"))) - .withUserDetail(new UserDetail().withFirstName("string") - .withLastName("string") - .withEmailAddress("abc@microsoft.com") - .withUserPrincipalName("abc@microsoft.com") - .withAadEmail("abc@microsoft.com")), - com.azure.core.util.Context.NONE); + public static void organizationCreate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations() + .define("myOrganization") + .withRegion("West US") + .withExistingResourceGroup("myResourceGroup") + .withOfferDetail(new OfferDetail().withPublisherId("string") + .withId("string") + .withPlanId("string") + .withPlanName("string") + .withTermUnit("string") + .withPrivateOfferId("string") + .withPrivateOfferIds(Arrays.asList("string"))) + .withUserDetail(new UserDetail().withFirstName("string") + .withLastName("string") + .withEmailAddress("contoso@microsoft.com") + .withUserPrincipalName("contoso@microsoft.com") + .withAadEmail("contoso@microsoft.com")) + .withTags(mapOf("Environment", "Dev")) + .withLinkOrganization(new LinkOrganization().withToken("fakeTokenPlaceholder")) + .create(); } // Use "Map.of" if available @@ -1001,46 +1279,31 @@ public final class ValidationsValidateOrganizationSamples { } ``` -### Validations_ValidateOrganizationV2 +### Topics_Delete ```java -import com.azure.resourcemanager.confluent.fluent.models.OrganizationResourceInner; -import 
com.azure.resourcemanager.confluent.models.OfferDetail; -import com.azure.resourcemanager.confluent.models.UserDetail; -import java.util.Arrays; +import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; import java.util.HashMap; import java.util.Map; /** - * Samples for Validations ValidateOrganizationV2. + * Samples for Access ListRoleBindings. */ -public final class ValidationsValidateOrganizationV2Samples { +public final class AccessListRoleBindingsSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ - * Validations_ValidateOrganizationsV2.json + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_RoleBindingList. + * json */ /** - * Sample code: Validations_ValidateOrganizations. + * Sample code: Access_RoleBindingList. * * @param manager Entry point to ConfluentManager. */ - public static void validationsValidateOrganizations(com.azure.resourcemanager.confluent.ConfluentManager manager) { - manager.validations() - .validateOrganizationV2WithResponse("myResourceGroup", "myOrganization", - new OrganizationResourceInner().withLocation("West US") - .withTags(mapOf("Environment", "Dev")) - .withOfferDetail(new OfferDetail().withPublisherId("string") - .withId("string") - .withPlanId("string") - .withPlanName("string") - .withTermUnit("string") - .withPrivateOfferId("string") - .withPrivateOfferIds(Arrays.asList("string"))) - .withUserDetail(new UserDetail().withFirstName("string") - .withLastName("string") - .withEmailAddress("abc@microsoft.com") - .withUserPrincipalName("abc@microsoft.com") - .withAadEmail("abc@microsoft.com")), + public static void accessRoleBindingList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.access() + .listRoleBindingsWithResponse("myResourceGroup", "myOrganization", new ListAccessRequestModel() + .withSearchFilters(mapOf("pageSize", "10", "pageToken", "fakeTokenPlaceholder")), com.azure.core.util.Context.NONE); } @@ -1058,3 +1321,99 @@ public final class ValidationsValidateOrganizationV2Samples { } ``` +### Topics_Get + +```java +/** + * Samples for Organization GetEnvironmentById. + */ +public final class OrganizationGetEnvironmentByIdSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_GetEnvironmentById.json + */ + /** + * Sample code: Organization_GetEnvironmentById. + * + * @param manager Entry point to ConfluentManager. + */ + public static void organizationGetEnvironmentById(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations() + .getEnvironmentByIdWithResponse("myResourceGroup", "myOrganization", "dlz-f3a90de", + com.azure.core.util.Context.NONE); + } +} +``` + +### Topics_List + +```java +/** + * Samples for Organization GetClusterApiKey. + */ +public final class OrganizationGetClusterApiKeySamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_GetClusterAPIKey.json + */ + /** + * Sample code: Organization_GetClusterAPIKey. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void organizationGetClusterAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations() + .getClusterApiKeyWithResponse("myResourceGroup", "myOrganization", "apiKeyId-123", + com.azure.core.util.Context.NONE); + } +} +``` + +### Validations_ValidateOrganization + +```java +/** + * Samples for Organization ListSchemaRegistryClusters. + */ +public final class OrganizationListSchemaRegistryClustersSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_ListSchemaRegistryClusters.json + */ + /** + * Sample code: Organization_ListSchemaRegistryClusters. + * + * @param manager Entry point to ConfluentManager. + */ + public static void + organizationListSchemaRegistryClusters(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations() + .listSchemaRegistryClusters("myResourceGroup", "myOrganization", "env-stgcczjp2j3", null, null, + com.azure.core.util.Context.NONE); + } +} +``` + +### Validations_ValidateOrganizationV2 + +```java +/** + * Samples for Organization GetByResourceGroup. + */ +public final class OrganizationGetByResourceGroupSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Get.json + */ + /** + * Sample code: Organization_Get. + * + * @param manager Entry point to ConfluentManager. + */ + public static void organizationGet(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.organizations() + .getByResourceGroupWithResponse("myResourceGroup", "myOrganization", com.azure.core.util.Context.NONE); + } +} +``` + diff --git a/sdk/confluent/azure-resourcemanager-confluent/pom.xml b/sdk/confluent/azure-resourcemanager-confluent/pom.xml index f90bdcb727fa..ce5448999f3e 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/pom.xml +++ b/sdk/confluent/azure-resourcemanager-confluent/pom.xml @@ -18,7 +18,7 @@ jar Microsoft Azure SDK for Confluent Management - This package contains Microsoft Azure SDK for Confluent Management SDK. For documentation on how to use this package, please see https://aka.ms/azsdk/java/mgmt. Package tag package-2024-02. + This package contains Microsoft Azure SDK for Confluent Management SDK. For documentation on how to use this package, please see https://aka.ms/azsdk/java/mgmt. Package tag package-2024-07. 
https://github.com/Azure/azure-sdk-for-java @@ -45,7 +45,7 @@ UTF-8 0 0 - false + true diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/ConfluentManager.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/ConfluentManager.java index 49f70d3bf923..c68baca20749 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/ConfluentManager.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/ConfluentManager.java @@ -22,23 +22,33 @@ import com.azure.core.http.policy.UserAgentPolicy; import com.azure.core.management.profile.AzureProfile; import com.azure.core.util.Configuration; +import com.azure.core.util.CoreUtils; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.confluent.fluent.ConfluentManagementClient; import com.azure.resourcemanager.confluent.implementation.AccessImpl; +import com.azure.resourcemanager.confluent.implementation.ClustersImpl; import com.azure.resourcemanager.confluent.implementation.ConfluentManagementClientBuilder; +import com.azure.resourcemanager.confluent.implementation.ConnectorsImpl; +import com.azure.resourcemanager.confluent.implementation.EnvironmentsImpl; import com.azure.resourcemanager.confluent.implementation.MarketplaceAgreementsImpl; import com.azure.resourcemanager.confluent.implementation.OrganizationOperationsImpl; import com.azure.resourcemanager.confluent.implementation.OrganizationsImpl; +import com.azure.resourcemanager.confluent.implementation.TopicsImpl; import com.azure.resourcemanager.confluent.implementation.ValidationsImpl; import com.azure.resourcemanager.confluent.models.Access; +import com.azure.resourcemanager.confluent.models.Clusters; +import com.azure.resourcemanager.confluent.models.Connectors; +import com.azure.resourcemanager.confluent.models.Environments; import com.azure.resourcemanager.confluent.models.MarketplaceAgreements; import com.azure.resourcemanager.confluent.models.OrganizationOperations; import com.azure.resourcemanager.confluent.models.Organizations; +import com.azure.resourcemanager.confluent.models.Topics; import com.azure.resourcemanager.confluent.models.Validations; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; @@ -56,6 +66,14 @@ public final class ConfluentManager { private Access access; + private Environments environments; + + private Clusters clusters; + + private Connectors connectors; + + private Topics topics; + private final ConfluentManagementClient clientObject; private ConfluentManager(HttpPipeline httpPipeline, AzureProfile profile, Duration defaultPollInterval) { @@ -108,6 +126,9 @@ public static Configurable configure() { */ public static final class Configurable { private static final ClientLogger LOGGER = new ClientLogger(Configurable.class); + private static final String SDK_VERSION = "version"; + private static final Map PROPERTIES + = CoreUtils.getProperties("azure-resourcemanager-confluent.properties"); private HttpClient httpClient; private HttpLogOptions httpLogOptions; @@ -215,12 +236,14 @@ public ConfluentManager authenticate(TokenCredential credential, AzureProfile pr Objects.requireNonNull(credential, "'credential' cannot be null."); Objects.requireNonNull(profile, "'profile' cannot be null."); + 
String clientVersion = PROPERTIES.getOrDefault(SDK_VERSION, "UnknownVersion"); + StringBuilder userAgentBuilder = new StringBuilder(); userAgentBuilder.append("azsdk-java") .append("-") .append("com.azure.resourcemanager.confluent") .append("/") - .append("1.2.0"); + .append(clientVersion); if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) { userAgentBuilder.append(" (") .append(Configuration.getGlobalConfiguration().get("java.version")) @@ -327,6 +350,54 @@ public Access access() { return access; } + /** + * Gets the resource collection API of Environments. It manages SCEnvironmentRecord. + * + * @return Resource collection API of Environments. + */ + public Environments environments() { + if (this.environments == null) { + this.environments = new EnvironmentsImpl(clientObject.getEnvironments(), this); + } + return environments; + } + + /** + * Gets the resource collection API of Clusters. It manages SCClusterRecord. + * + * @return Resource collection API of Clusters. + */ + public Clusters clusters() { + if (this.clusters == null) { + this.clusters = new ClustersImpl(clientObject.getClusters(), this); + } + return clusters; + } + + /** + * Gets the resource collection API of Connectors. It manages ConnectorResource. + * + * @return Resource collection API of Connectors. + */ + public Connectors connectors() { + if (this.connectors == null) { + this.connectors = new ConnectorsImpl(clientObject.getConnectors(), this); + } + return connectors; + } + + /** + * Gets the resource collection API of Topics. It manages TopicRecord. + * + * @return Resource collection API of Topics. + */ + public Topics topics() { + if (this.topics == null) { + this.topics = new TopicsImpl(clientObject.getTopics(), this); + } + return topics; + } + /** * Gets wrapped service client ConfluentManagementClient providing direct access to the underlying auto-generated * API implementation, based on Azure REST API. diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ClustersClient.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ClustersClient.java new file mode 100644 index 000000000000..d92a90c51822 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ClustersClient.java @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.fluent; + +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.http.rest.Response; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner; + +/** + * An instance of this class provides access to all the operations defined in ClustersClient. + */ +public interface ClustersClient { + /** + * Create confluent clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param body Confluent Cluster resource model. 
+ * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of cluster record along with {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + Response createOrUpdateWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, SCClusterRecordInner body, Context context); + + /** + * Create confluent clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of cluster record. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + SCClusterRecordInner createOrUpdate(String resourceGroupName, String organizationName, String environmentId, + String clusterId); + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId); + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Context context); + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. 
+ * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId); + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + Context context); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ConfluentManagementClient.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ConfluentManagementClient.java index 9af539dd74ab..049c6de91f20 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ConfluentManagementClient.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ConfluentManagementClient.java @@ -80,4 +80,32 @@ public interface ConfluentManagementClient { * @return the AccessClient object. */ AccessClient getAccess(); + + /** + * Gets the EnvironmentsClient object to access its operations. + * + * @return the EnvironmentsClient object. + */ + EnvironmentsClient getEnvironments(); + + /** + * Gets the ClustersClient object to access its operations. + * + * @return the ClustersClient object. + */ + ClustersClient getClusters(); + + /** + * Gets the ConnectorsClient object to access its operations. + * + * @return the ConnectorsClient object. + */ + ConnectorsClient getConnectors(); + + /** + * Gets the TopicsClient object to access its operations. + * + * @return the TopicsClient object. + */ + TopicsClient getTopics(); } diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ConnectorsClient.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ConnectorsClient.java new file mode 100644 index 000000000000..5e6ff6f70a75 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/ConnectorsClient.java @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
+ +package com.azure.resourcemanager.confluent.fluent; + +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.http.rest.PagedIterable; +import com.azure.core.http.rest.Response; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.models.ConnectorResourceInner; + +/** + * An instance of this class provides access to all the operations defined in ConnectorsClient. + */ +public interface ConnectorsClient { + /** + * Get confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent connector by Name along with {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + Response getWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, Context context); + + /** + * Get confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent connector by Name. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + ConnectorResourceInner get(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String connectorName); + + /** + * Create confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param body Confluent Connector resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of connector record along with {@link Response}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + Response createOrUpdateWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, ConnectorResourceInner body, Context context); + + /** + * Create confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of connector record. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + ConnectorResourceInner createOrUpdate(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String connectorName); + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName); + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, Context context); + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. 
+ * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String connectorName); + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String connectorName, Context context); + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId); + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. 
+ */ + @ServiceMethod(returns = ReturnType.COLLECTION) + PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId, Integer pageSize, String pageToken, Context context); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/EnvironmentsClient.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/EnvironmentsClient.java new file mode 100644 index 000000000000..78f6cf4f22a2 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/EnvironmentsClient.java @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.fluent; + +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.http.rest.Response; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.models.SCEnvironmentRecordInner; + +/** + * An instance of this class provides access to all the operations defined in EnvironmentsClient. + */ +public interface EnvironmentsClient { + /** + * Create confluent environment. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param body Confluent Environment resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details about environment name, metadata and environment id of an environment along with + * {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + Response createOrUpdateWithResponse(String resourceGroupName, String organizationName, + String environmentId, SCEnvironmentRecordInner body, Context context); + + /** + * Create confluent environment. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details about environment name, metadata and environment id of an environment. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + SCEnvironmentRecordInner createOrUpdate(String resourceGroupName, String organizationName, String environmentId); + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. 
+ * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId); + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, Context context); + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId); + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId, Context context); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/TopicsClient.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/TopicsClient.java new file mode 100644 index 000000000000..58477c42694a --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/TopicsClient.java @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
+ +package com.azure.resourcemanager.confluent.fluent; + +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.http.rest.PagedIterable; +import com.azure.core.http.rest.Response; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner; + +/** + * An instance of this class provides access to all the operations defined in TopicsClient. + */ +public interface TopicsClient { + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId); + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId, Integer pageSize, String pageToken, Context context); + + /** + * Get confluent topic by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent topic by Name along with {@link Response}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + Response getWithResponse(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String topicName, Context context); + + /** + * Get confluent topic by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent topic by Name. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + TopicRecordInner get(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName); + + /** + * Create confluent topics by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param body Confluent Topics resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of topic record along with {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + Response createWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, TopicRecordInner body, Context context); + + /** + * Create confluent topics by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of topic record. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + TopicRecordInner create(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName); + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. 
+ * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName); + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, Context context); + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName); + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName, Context context); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ClusterProperties.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ClusterProperties.java index 1a24be556c4e..cd7054865679 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ClusterProperties.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ClusterProperties.java @@ -15,7 +15,7 @@ import java.io.IOException; /** - * Cluster Properties. + * Service Connector Cluster Properties. */ @Fluent public final class ClusterProperties implements JsonSerializable { diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ConnectorResourceInner.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ConnectorResourceInner.java new file mode 100644 index 000000000000..8e74a8763ccc --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ConnectorResourceInner.java @@ -0,0 +1,233 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.fluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.management.ProxyResource; +import com.azure.core.management.SystemData; +import com.azure.core.util.logging.ClientLogger; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import com.azure.resourcemanager.confluent.models.ConnectorInfoBase; +import com.azure.resourcemanager.confluent.models.ConnectorServiceTypeInfoBase; +import com.azure.resourcemanager.confluent.models.PartnerInfoBase; +import java.io.IOException; + +/** + * Details of connector record. + */ +@Fluent +public final class ConnectorResourceInner extends ProxyResource { + /* + * The properties of the Connector + */ + private ConnectorResourceProperties innerProperties = new ConnectorResourceProperties(); + + /* + * Azure Resource Manager metadata containing createdBy and modifiedBy information. + */ + private SystemData systemData; + + /* + * The type of the resource. + */ + private String type; + + /* + * The name of the resource. + */ + private String name; + + /* + * Fully qualified resource Id for the resource. + */ + private String id; + + /** + * Creates an instance of ConnectorResourceInner class. + */ + public ConnectorResourceInner() { + } + + /** + * Get the innerProperties property: The properties of the Connector. + * + * @return the innerProperties value. + */ + private ConnectorResourceProperties innerProperties() { + return this.innerProperties; + } + + /** + * Get the systemData property: Azure Resource Manager metadata containing createdBy and modifiedBy information. + * + * @return the systemData value. + */ + public SystemData systemData() { + return this.systemData; + } + + /** + * Get the type property: The type of the resource. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + + /** + * Get the name property: The name of the resource. + * + * @return the name value. + */ + @Override + public String name() { + return this.name; + } + + /** + * Get the id property: Fully qualified resource Id for the resource. + * + * @return the id value. + */ + @Override + public String id() { + return this.id; + } + + /** + * Get the connectorBasicInfo property: Connector Info Base. + * + * @return the connectorBasicInfo value. + */ + public ConnectorInfoBase connectorBasicInfo() { + return this.innerProperties() == null ? null : this.innerProperties().connectorBasicInfo(); + } + + /** + * Set the connectorBasicInfo property: Connector Info Base. + * + * @param connectorBasicInfo the connectorBasicInfo value to set. + * @return the ConnectorResourceInner object itself. + */ + public ConnectorResourceInner withConnectorBasicInfo(ConnectorInfoBase connectorBasicInfo) { + if (this.innerProperties() == null) { + this.innerProperties = new ConnectorResourceProperties(); + } + this.innerProperties().withConnectorBasicInfo(connectorBasicInfo); + return this; + } + + /** + * Get the connectorServiceTypeInfo property: Connector Service type info base properties. + * + * @return the connectorServiceTypeInfo value. + */ + public ConnectorServiceTypeInfoBase connectorServiceTypeInfo() { + return this.innerProperties() == null ? null : this.innerProperties().connectorServiceTypeInfo(); + } + + /** + * Set the connectorServiceTypeInfo property: Connector Service type info base properties. + * + * @param connectorServiceTypeInfo the connectorServiceTypeInfo value to set. + * @return the ConnectorResourceInner object itself. + */ + public ConnectorResourceInner withConnectorServiceTypeInfo(ConnectorServiceTypeInfoBase connectorServiceTypeInfo) { + if (this.innerProperties() == null) { + this.innerProperties = new ConnectorResourceProperties(); + } + this.innerProperties().withConnectorServiceTypeInfo(connectorServiceTypeInfo); + return this; + } + + /** + * Get the partnerConnectorInfo property: The connection information consumed by applications. + * + * @return the partnerConnectorInfo value. + */ + public PartnerInfoBase partnerConnectorInfo() { + return this.innerProperties() == null ? null : this.innerProperties().partnerConnectorInfo(); + } + + /** + * Set the partnerConnectorInfo property: The connection information consumed by applications. + * + * @param partnerConnectorInfo the partnerConnectorInfo value to set. + * @return the ConnectorResourceInner object itself. + */ + public ConnectorResourceInner withPartnerConnectorInfo(PartnerInfoBase partnerConnectorInfo) { + if (this.innerProperties() == null) { + this.innerProperties = new ConnectorResourceProperties(); + } + this.innerProperties().withPartnerConnectorInfo(partnerConnectorInfo); + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + public void validate() { + if (innerProperties() == null) { + throw LOGGER.atError() + .log(new IllegalArgumentException( + "Missing required property innerProperties in model ConnectorResourceInner")); + } else { + innerProperties().validate(); + } + } + + private static final ClientLogger LOGGER = new ClientLogger(ConnectorResourceInner.class); + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("properties", this.innerProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConnectorResourceInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ConnectorResourceInner if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the ConnectorResourceInner. + */ + public static ConnectorResourceInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConnectorResourceInner deserializedConnectorResourceInner = new ConnectorResourceInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedConnectorResourceInner.id = reader.getString(); + } else if ("name".equals(fieldName)) { + deserializedConnectorResourceInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedConnectorResourceInner.type = reader.getString(); + } else if ("properties".equals(fieldName)) { + deserializedConnectorResourceInner.innerProperties = ConnectorResourceProperties.fromJson(reader); + } else if ("systemData".equals(fieldName)) { + deserializedConnectorResourceInner.systemData = SystemData.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedConnectorResourceInner; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ConnectorResourceProperties.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ConnectorResourceProperties.java new file mode 100644 index 000000000000..524c7514f1db --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/ConnectorResourceProperties.java @@ -0,0 +1,163 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.fluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import com.azure.resourcemanager.confluent.models.ConnectorInfoBase; +import com.azure.resourcemanager.confluent.models.ConnectorServiceTypeInfoBase; +import com.azure.resourcemanager.confluent.models.PartnerInfoBase; +import java.io.IOException; + +/** + * The resource properties of the Connector. 
+ */ +@Fluent +public final class ConnectorResourceProperties implements JsonSerializable { + /* + * Connector Info Base + */ + private ConnectorInfoBase connectorBasicInfo; + + /* + * Connector Service type info base properties. + */ + private ConnectorServiceTypeInfoBase connectorServiceTypeInfo; + + /* + * The connection information consumed by applications. + */ + private PartnerInfoBase partnerConnectorInfo; + + /** + * Creates an instance of ConnectorResourceProperties class. + */ + public ConnectorResourceProperties() { + } + + /** + * Get the connectorBasicInfo property: Connector Info Base. + * + * @return the connectorBasicInfo value. + */ + public ConnectorInfoBase connectorBasicInfo() { + return this.connectorBasicInfo; + } + + /** + * Set the connectorBasicInfo property: Connector Info Base. + * + * @param connectorBasicInfo the connectorBasicInfo value to set. + * @return the ConnectorResourceProperties object itself. + */ + public ConnectorResourceProperties withConnectorBasicInfo(ConnectorInfoBase connectorBasicInfo) { + this.connectorBasicInfo = connectorBasicInfo; + return this; + } + + /** + * Get the connectorServiceTypeInfo property: Connector Service type info base properties. + * + * @return the connectorServiceTypeInfo value. + */ + public ConnectorServiceTypeInfoBase connectorServiceTypeInfo() { + return this.connectorServiceTypeInfo; + } + + /** + * Set the connectorServiceTypeInfo property: Connector Service type info base properties. + * + * @param connectorServiceTypeInfo the connectorServiceTypeInfo value to set. + * @return the ConnectorResourceProperties object itself. + */ + public ConnectorResourceProperties + withConnectorServiceTypeInfo(ConnectorServiceTypeInfoBase connectorServiceTypeInfo) { + this.connectorServiceTypeInfo = connectorServiceTypeInfo; + return this; + } + + /** + * Get the partnerConnectorInfo property: The connection information consumed by applications. + * + * @return the partnerConnectorInfo value. + */ + public PartnerInfoBase partnerConnectorInfo() { + return this.partnerConnectorInfo; + } + + /** + * Set the partnerConnectorInfo property: The connection information consumed by applications. + * + * @param partnerConnectorInfo the partnerConnectorInfo value to set. + * @return the ConnectorResourceProperties object itself. + */ + public ConnectorResourceProperties withPartnerConnectorInfo(PartnerInfoBase partnerConnectorInfo) { + this.partnerConnectorInfo = partnerConnectorInfo; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + if (connectorBasicInfo() != null) { + connectorBasicInfo().validate(); + } + if (connectorServiceTypeInfo() != null) { + connectorServiceTypeInfo().validate(); + } + if (partnerConnectorInfo() != null) { + partnerConnectorInfo().validate(); + } + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("connectorBasicInfo", this.connectorBasicInfo); + jsonWriter.writeJsonField("connectorServiceTypeInfo", this.connectorServiceTypeInfo); + jsonWriter.writeJsonField("partnerConnectorInfo", this.partnerConnectorInfo); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConnectorResourceProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of ConnectorResourceProperties if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ConnectorResourceProperties. + */ + public static ConnectorResourceProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConnectorResourceProperties deserializedConnectorResourceProperties = new ConnectorResourceProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorBasicInfo".equals(fieldName)) { + deserializedConnectorResourceProperties.connectorBasicInfo = ConnectorInfoBase.fromJson(reader); + } else if ("connectorServiceTypeInfo".equals(fieldName)) { + deserializedConnectorResourceProperties.connectorServiceTypeInfo + = ConnectorServiceTypeInfoBase.fromJson(reader); + } else if ("partnerConnectorInfo".equals(fieldName)) { + deserializedConnectorResourceProperties.partnerConnectorInfo = PartnerInfoBase.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedConnectorResourceProperties; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/EnvironmentProperties.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/EnvironmentProperties.java index a33f0ec55f93..b8323d36c0d9 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/EnvironmentProperties.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/EnvironmentProperties.java @@ -10,6 +10,7 @@ import com.azure.json.JsonToken; import com.azure.json.JsonWriter; import com.azure.resourcemanager.confluent.models.SCMetadataEntity; +import com.azure.resourcemanager.confluent.models.StreamGovernanceConfig; import java.io.IOException; /** @@ -17,6 +18,11 @@ */ @Fluent public final class EnvironmentProperties implements JsonSerializable { + /* + * Stream governance configuration + */ + private StreamGovernanceConfig streamGovernanceConfig; + /* * Metadata of the record */ @@ -28,6 +34,26 @@ public final class EnvironmentProperties implements JsonSerializable { +public final class SCClusterRecordInner extends ProxyResource { /* * Type of cluster */ @@ -29,6 +29,11 @@ public final class SCClusterRecordInner implements JsonSerializable { +public final class SCEnvironmentRecordInner extends ProxyResource { /* * Type of environment */ @@ -27,6 +28,11 @@ public final class SCEnvironmentRecordInner implements JsonSerializable { + /* + * Type of topic + */ + private String kind; + + /* + * Topic Id returned by Confluent + */ + private String topicId; + + /* + * Metadata of the record + */ + private TopicMetadataEntity metadata; + + /* + * Partition Specification of the topic + */ + private TopicsRelatedLink partitions; + + /* + * Config Specification of the topic + */ + private TopicsRelatedLink configs; + + /* + * Input Config Specification of the topic + */ + private List inputConfigs; + + /* + * Partition Reassignment Specification of the topic + */ + private TopicsRelatedLink partitionsReassignments; + + /* + * Partition count of the topic + */ + private String partitionsCount; + + /* + * Replication factor of the topic + */ + private String replicationFactor; + + /** + * Creates an 
instance of TopicProperties class. + */ + public TopicProperties() { + } + + /** + * Get the kind property: Type of topic. + * + * @return the kind value. + */ + public String kind() { + return this.kind; + } + + /** + * Set the kind property: Type of topic. + * + * @param kind the kind value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withKind(String kind) { + this.kind = kind; + return this; + } + + /** + * Get the topicId property: Topic Id returned by Confluent. + * + * @return the topicId value. + */ + public String topicId() { + return this.topicId; + } + + /** + * Set the topicId property: Topic Id returned by Confluent. + * + * @param topicId the topicId value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withTopicId(String topicId) { + this.topicId = topicId; + return this; + } + + /** + * Get the metadata property: Metadata of the record. + * + * @return the metadata value. + */ + public TopicMetadataEntity metadata() { + return this.metadata; + } + + /** + * Set the metadata property: Metadata of the record. + * + * @param metadata the metadata value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withMetadata(TopicMetadataEntity metadata) { + this.metadata = metadata; + return this; + } + + /** + * Get the partitions property: Partition Specification of the topic. + * + * @return the partitions value. + */ + public TopicsRelatedLink partitions() { + return this.partitions; + } + + /** + * Set the partitions property: Partition Specification of the topic. + * + * @param partitions the partitions value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withPartitions(TopicsRelatedLink partitions) { + this.partitions = partitions; + return this; + } + + /** + * Get the configs property: Config Specification of the topic. + * + * @return the configs value. + */ + public TopicsRelatedLink configs() { + return this.configs; + } + + /** + * Set the configs property: Config Specification of the topic. + * + * @param configs the configs value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withConfigs(TopicsRelatedLink configs) { + this.configs = configs; + return this; + } + + /** + * Get the inputConfigs property: Input Config Specification of the topic. + * + * @return the inputConfigs value. + */ + public List inputConfigs() { + return this.inputConfigs; + } + + /** + * Set the inputConfigs property: Input Config Specification of the topic. + * + * @param inputConfigs the inputConfigs value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withInputConfigs(List inputConfigs) { + this.inputConfigs = inputConfigs; + return this; + } + + /** + * Get the partitionsReassignments property: Partition Reassignment Specification of the topic. + * + * @return the partitionsReassignments value. + */ + public TopicsRelatedLink partitionsReassignments() { + return this.partitionsReassignments; + } + + /** + * Set the partitionsReassignments property: Partition Reassignment Specification of the topic. + * + * @param partitionsReassignments the partitionsReassignments value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withPartitionsReassignments(TopicsRelatedLink partitionsReassignments) { + this.partitionsReassignments = partitionsReassignments; + return this; + } + + /** + * Get the partitionsCount property: Partition count of the topic. 
+ * + * @return the partitionsCount value. + */ + public String partitionsCount() { + return this.partitionsCount; + } + + /** + * Set the partitionsCount property: Partition count of the topic. + * + * @param partitionsCount the partitionsCount value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withPartitionsCount(String partitionsCount) { + this.partitionsCount = partitionsCount; + return this; + } + + /** + * Get the replicationFactor property: Replication factor of the topic. + * + * @return the replicationFactor value. + */ + public String replicationFactor() { + return this.replicationFactor; + } + + /** + * Set the replicationFactor property: Replication factor of the topic. + * + * @param replicationFactor the replicationFactor value to set. + * @return the TopicProperties object itself. + */ + public TopicProperties withReplicationFactor(String replicationFactor) { + this.replicationFactor = replicationFactor; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + if (metadata() != null) { + metadata().validate(); + } + if (partitions() != null) { + partitions().validate(); + } + if (configs() != null) { + configs().validate(); + } + if (inputConfigs() != null) { + inputConfigs().forEach(e -> e.validate()); + } + if (partitionsReassignments() != null) { + partitionsReassignments().validate(); + } + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("kind", this.kind); + jsonWriter.writeStringField("topicId", this.topicId); + jsonWriter.writeJsonField("metadata", this.metadata); + jsonWriter.writeJsonField("partitions", this.partitions); + jsonWriter.writeJsonField("configs", this.configs); + jsonWriter.writeArrayField("inputConfigs", this.inputConfigs, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeJsonField("partitionsReassignments", this.partitionsReassignments); + jsonWriter.writeStringField("partitionsCount", this.partitionsCount); + jsonWriter.writeStringField("replicationFactor", this.replicationFactor); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TopicProperties from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TopicProperties if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the TopicProperties. 
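To see the toJson/fromJson pair in action, here is a small self-contained round trip built on the azure-json primitives this module already uses; the field values are arbitrary sample data.

```java
import com.azure.json.JsonProviders;
import com.azure.json.JsonReader;
import com.azure.json.JsonWriter;
import com.azure.resourcemanager.confluent.fluent.models.TopicProperties;
import java.io.IOException;
import java.io.StringWriter;

public final class TopicPropertiesRoundTrip {
    public static void main(String[] args) throws IOException {
        TopicProperties original = new TopicProperties()
            .withKind("topic")                 // sample values only
            .withTopicId("topic-123")
            .withPartitionsCount("6")
            .withReplicationFactor("3");

        // Serialize with the generated toJson(JsonWriter) implementation.
        StringWriter out = new StringWriter();
        try (JsonWriter writer = JsonProviders.createWriter(out)) {
            original.toJson(writer);
        }
        String json = out.toString();

        // Deserialize the same payload back through fromJson(JsonReader).
        try (JsonReader reader = JsonProviders.createReader(json)) {
            TopicProperties copy = TopicProperties.fromJson(reader);
            System.out.println(copy.topicId() + " / " + copy.partitionsCount());
        }
    }
}
```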
+ */ + public static TopicProperties fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TopicProperties deserializedTopicProperties = new TopicProperties(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("kind".equals(fieldName)) { + deserializedTopicProperties.kind = reader.getString(); + } else if ("topicId".equals(fieldName)) { + deserializedTopicProperties.topicId = reader.getString(); + } else if ("metadata".equals(fieldName)) { + deserializedTopicProperties.metadata = TopicMetadataEntity.fromJson(reader); + } else if ("partitions".equals(fieldName)) { + deserializedTopicProperties.partitions = TopicsRelatedLink.fromJson(reader); + } else if ("configs".equals(fieldName)) { + deserializedTopicProperties.configs = TopicsRelatedLink.fromJson(reader); + } else if ("inputConfigs".equals(fieldName)) { + List inputConfigs + = reader.readArray(reader1 -> TopicsInputConfig.fromJson(reader1)); + deserializedTopicProperties.inputConfigs = inputConfigs; + } else if ("partitionsReassignments".equals(fieldName)) { + deserializedTopicProperties.partitionsReassignments = TopicsRelatedLink.fromJson(reader); + } else if ("partitionsCount".equals(fieldName)) { + deserializedTopicProperties.partitionsCount = reader.getString(); + } else if ("replicationFactor".equals(fieldName)) { + deserializedTopicProperties.replicationFactor = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTopicProperties; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/TopicRecordInner.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/TopicRecordInner.java new file mode 100644 index 000000000000..5523cdfc4c3f --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/fluent/models/TopicRecordInner.java @@ -0,0 +1,348 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.fluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.core.management.ProxyResource; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import com.azure.resourcemanager.confluent.models.TopicMetadataEntity; +import com.azure.resourcemanager.confluent.models.TopicsInputConfig; +import com.azure.resourcemanager.confluent.models.TopicsRelatedLink; +import java.io.IOException; +import java.util.List; + +/** + * Details of topic record. + */ +@Fluent +public final class TopicRecordInner extends ProxyResource { + /* + * Topic Properties + */ + private TopicProperties innerProperties; + + /* + * The type of the resource. + */ + private String type; + + /* + * The name of the resource. + */ + private String name; + + /* + * Fully qualified resource Id for the resource. + */ + private String id; + + /** + * Creates an instance of TopicRecordInner class. + */ + public TopicRecordInner() { + } + + /** + * Get the innerProperties property: Topic Properties. + * + * @return the innerProperties value. + */ + private TopicProperties innerProperties() { + return this.innerProperties; + } + + /** + * Get the type property: The type of the resource. + * + * @return the type value. 
+ */ + @Override + public String type() { + return this.type; + } + + /** + * Get the name property: The name of the resource. + * + * @return the name value. + */ + @Override + public String name() { + return this.name; + } + + /** + * Get the id property: Fully qualified resource Id for the resource. + * + * @return the id value. + */ + @Override + public String id() { + return this.id; + } + + /** + * Get the kind property: Type of topic. + * + * @return the kind value. + */ + public String kind() { + return this.innerProperties() == null ? null : this.innerProperties().kind(); + } + + /** + * Set the kind property: Type of topic. + * + * @param kind the kind value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withKind(String kind) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withKind(kind); + return this; + } + + /** + * Get the topicId property: Topic Id returned by Confluent. + * + * @return the topicId value. + */ + public String topicId() { + return this.innerProperties() == null ? null : this.innerProperties().topicId(); + } + + /** + * Set the topicId property: Topic Id returned by Confluent. + * + * @param topicId the topicId value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withTopicId(String topicId) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withTopicId(topicId); + return this; + } + + /** + * Get the metadata property: Metadata of the record. + * + * @return the metadata value. + */ + public TopicMetadataEntity metadata() { + return this.innerProperties() == null ? null : this.innerProperties().metadata(); + } + + /** + * Set the metadata property: Metadata of the record. + * + * @param metadata the metadata value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withMetadata(TopicMetadataEntity metadata) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withMetadata(metadata); + return this; + } + + /** + * Get the partitions property: Partition Specification of the topic. + * + * @return the partitions value. + */ + public TopicsRelatedLink partitions() { + return this.innerProperties() == null ? null : this.innerProperties().partitions(); + } + + /** + * Set the partitions property: Partition Specification of the topic. + * + * @param partitions the partitions value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withPartitions(TopicsRelatedLink partitions) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withPartitions(partitions); + return this; + } + + /** + * Get the configs property: Config Specification of the topic. + * + * @return the configs value. + */ + public TopicsRelatedLink configs() { + return this.innerProperties() == null ? null : this.innerProperties().configs(); + } + + /** + * Set the configs property: Config Specification of the topic. + * + * @param configs the configs value to set. + * @return the TopicRecordInner object itself. 
+ */ + public TopicRecordInner withConfigs(TopicsRelatedLink configs) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withConfigs(configs); + return this; + } + + /** + * Get the inputConfigs property: Input Config Specification of the topic. + * + * @return the inputConfigs value. + */ + public List inputConfigs() { + return this.innerProperties() == null ? null : this.innerProperties().inputConfigs(); + } + + /** + * Set the inputConfigs property: Input Config Specification of the topic. + * + * @param inputConfigs the inputConfigs value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withInputConfigs(List inputConfigs) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withInputConfigs(inputConfigs); + return this; + } + + /** + * Get the partitionsReassignments property: Partition Reassignment Specification of the topic. + * + * @return the partitionsReassignments value. + */ + public TopicsRelatedLink partitionsReassignments() { + return this.innerProperties() == null ? null : this.innerProperties().partitionsReassignments(); + } + + /** + * Set the partitionsReassignments property: Partition Reassignment Specification of the topic. + * + * @param partitionsReassignments the partitionsReassignments value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withPartitionsReassignments(TopicsRelatedLink partitionsReassignments) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withPartitionsReassignments(partitionsReassignments); + return this; + } + + /** + * Get the partitionsCount property: Partition count of the topic. + * + * @return the partitionsCount value. + */ + public String partitionsCount() { + return this.innerProperties() == null ? null : this.innerProperties().partitionsCount(); + } + + /** + * Set the partitionsCount property: Partition count of the topic. + * + * @param partitionsCount the partitionsCount value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withPartitionsCount(String partitionsCount) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withPartitionsCount(partitionsCount); + return this; + } + + /** + * Get the replicationFactor property: Replication factor of the topic. + * + * @return the replicationFactor value. + */ + public String replicationFactor() { + return this.innerProperties() == null ? null : this.innerProperties().replicationFactor(); + } + + /** + * Set the replicationFactor property: Replication factor of the topic. + * + * @param replicationFactor the replicationFactor value to set. + * @return the TopicRecordInner object itself. + */ + public TopicRecordInner withReplicationFactor(String replicationFactor) { + if (this.innerProperties() == null) { + this.innerProperties = new TopicProperties(); + } + this.innerProperties().withReplicationFactor(replicationFactor); + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + public void validate() { + if (innerProperties() != null) { + innerProperties().validate(); + } + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeJsonField("properties", this.innerProperties); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TopicRecordInner from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TopicRecordInner if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IllegalStateException If the deserialized JSON object was missing any required properties. + * @throws IOException If an error occurs while reading the TopicRecordInner. + */ + public static TopicRecordInner fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TopicRecordInner deserializedTopicRecordInner = new TopicRecordInner(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("id".equals(fieldName)) { + deserializedTopicRecordInner.id = reader.getString(); + } else if ("name".equals(fieldName)) { + deserializedTopicRecordInner.name = reader.getString(); + } else if ("type".equals(fieldName)) { + deserializedTopicRecordInner.type = reader.getString(); + } else if ("properties".equals(fieldName)) { + deserializedTopicRecordInner.innerProperties = TopicProperties.fromJson(reader); + } else { + reader.skipChildren(); + } + } + + return deserializedTopicRecordInner; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ClustersClientImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ClustersClientImpl.java new file mode 100644 index 000000000000..482615538ace --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ClustersClientImpl.java @@ -0,0 +1,490 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
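One note on the wrapper pattern used by TopicRecordInner above, before moving on to the client implementations: the getters are null-safe against a missing inner TopicProperties, and the first setter creates it. A minimal sketch with placeholder values:

```java
import com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner;

public final class TopicRecordSample {
    public static void main(String[] args) {
        TopicRecordInner topic = new TopicRecordInner();

        // Getters delegate to the inner TopicProperties and return null while it is absent.
        System.out.println("before: " + topic.partitionsCount()); // prints "before: null"

        // The first setter creates the inner properties bag; later setters reuse it.
        topic.withKind("topic")
            .withPartitionsCount("3")        // placeholder values
            .withReplicationFactor("3");

        System.out.println(topic.kind() + ": " + topic.partitionsCount() + " partition(s)");
    }
}
```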
+ +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.annotation.BodyParam; +import com.azure.core.annotation.Delete; +import com.azure.core.annotation.ExpectedResponses; +import com.azure.core.annotation.HeaderParam; +import com.azure.core.annotation.Headers; +import com.azure.core.annotation.Host; +import com.azure.core.annotation.HostParam; +import com.azure.core.annotation.PathParam; +import com.azure.core.annotation.Put; +import com.azure.core.annotation.QueryParam; +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceInterface; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.annotation.UnexpectedResponseExceptionType; +import com.azure.core.http.rest.Response; +import com.azure.core.http.rest.RestProxy; +import com.azure.core.management.exception.ManagementException; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.FluxUtil; +import com.azure.core.util.polling.PollerFlux; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.ClustersClient; +import com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner; +import java.nio.ByteBuffer; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * An instance of this class provides access to all the operations defined in ClustersClient. + */ +public final class ClustersClientImpl implements ClustersClient { + /** + * The proxy service used to perform REST calls. + */ + private final ClustersService service; + + /** + * The service client containing this operation class. + */ + private final ConfluentManagementClientImpl client; + + /** + * Initializes an instance of ClustersClientImpl. + * + * @param client the instance of the service client containing this operation class. + */ + ClustersClientImpl(ConfluentManagementClientImpl client) { + this.service = RestProxy.create(ClustersService.class, client.getHttpPipeline(), client.getSerializerAdapter()); + this.client = client; + } + + /** + * The interface defining all the services for ConfluentManagementClientClusters to be used by the proxy service to + * perform REST calls. 
+ */ + @Host("{$host}") + @ServiceInterface(name = "ConfluentManagementC") + public interface ClustersService { + @Headers({ "Content-Type: application/json" }) + @Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}") + @ExpectedResponses({ 200, 201 }) + @UnexpectedResponseExceptionType(ManagementException.class) + Mono<Response<SCClusterRecordInner>> createOrUpdate(@HostParam("$host") String endpoint, + @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, + @PathParam("resourceGroupName") String resourceGroupName, + @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId, + @PathParam("clusterId") String clusterId, @BodyParam("application/json") SCClusterRecordInner body, + @HeaderParam("Accept") String accept, Context context); + + @Headers({ "Content-Type: application/json" }) + @Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}") + @ExpectedResponses({ 202, 204 }) + @UnexpectedResponseExceptionType(ManagementException.class) + Mono<Response<Flux<ByteBuffer>>> delete(@HostParam("$host") String endpoint, + @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, + @PathParam("resourceGroupName") String resourceGroupName, + @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId, + @PathParam("clusterId") String clusterId, @HeaderParam("Accept") String accept, Context context); + } + + /** + * Create confluent clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param body Confluent Cluster resource model. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of cluster record along with {@link Response} on successful completion of {@link Mono}.
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono<Response<SCClusterRecordInner>> createOrUpdateWithResponseAsync(String resourceGroupName, + String organizationName, String environmentId, String clusterId, SCClusterRecordInner body) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.createOrUpdate(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, body, + accept, context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Create confluent clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param body Confluent Cluster resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of cluster record along with {@link Response} on successful completion of {@link Mono}.
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> createOrUpdateWithResponseAsync(String resourceGroupName, + String organizationName, String environmentId, String clusterId, SCClusterRecordInner body, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.createOrUpdate(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, body, + accept, context); + } + + /** + * Create confluent clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of cluster record on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono createOrUpdateAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + final SCClusterRecordInner body = null; + return createOrUpdateWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, body) + .flatMap(res -> Mono.justOrEmpty(res.getValue())); + } + + /** + * Create confluent clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param body Confluent Cluster resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of cluster record along with {@link Response}. 
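For orientation, the synchronous wrappers that follow are what most callers reach first. A rough sketch, assuming the ClustersClient interface exposes the same public methods as this implementation; the resource names are placeholders, and the empty SCClusterRecordInner stands in for a fully populated cluster model:

```java
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.resourcemanager.confluent.fluent.ClustersClient;
import com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner;

public final class ClustersCreateSample {
    // Hypothetical helper: identifiers are placeholders, and the body would normally
    // carry a populated cluster specification rather than an empty model.
    static SCClusterRecordInner createCluster(ClustersClient clustersClient) {
        SCClusterRecordInner body = new SCClusterRecordInner();
        Response<SCClusterRecordInner> response = clustersClient.createOrUpdateWithResponse(
            "my-rg", "my-confluent-org", "env-00000", "lkc-00000", body, Context.NONE);
        return response.getValue();
    }
}
```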
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + public Response createOrUpdateWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, SCClusterRecordInner body, Context context) { + return createOrUpdateWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, body, + context).block(); + } + + /** + * Create confluent clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of cluster record. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public SCClusterRecordInner createOrUpdate(String resourceGroupName, String organizationName, String environmentId, + String clusterId) { + final SCClusterRecordInner body = null; + return createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, body, + Context.NONE).getValue(); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono>> deleteWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.delete(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, accept, + context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Delete confluent cluster by id. 
+ * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono>> deleteWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.delete(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, accept, context); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link PollerFlux} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + private PollerFlux, Void> beginDeleteAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + Mono>> mono + = deleteWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId); + return this.client.getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, + this.client.getContext()); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. 
+ * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link PollerFlux} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + private PollerFlux, Void> beginDeleteAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Context context) { + context = this.client.mergeContext(context); + Mono>> mono + = deleteWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, context); + return this.client.getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, + context); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + return this.beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId).getSyncPoller(); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Context context) { + return this.beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, context) + .getSyncPoller(); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId).last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId, Context context) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, context).last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId) { + deleteAsync(resourceGroupName, organizationName, environmentId, clusterId).block(); + } + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
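All of the delete overloads funnel into the same long-running operation; beginDelete exposes the poller for callers that want to observe progress, while the blocking delete shown next simply waits for the LRO to finish. A sketch with placeholder identifiers, assuming ClustersClient surfaces beginDelete as implemented here:

```java
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.confluent.fluent.ClustersClient;

public final class ClustersDeleteSample {
    static void deleteCluster(ClustersClient clustersClient) {
        // Option 1: drive the long-running delete explicitly and wait for the terminal state.
        SyncPoller<PollResult<Void>, Void> poller = clustersClient.beginDelete(
            "my-rg", "my-confluent-org", "env-00000", "lkc-00000"); // placeholder ids
        poller.waitForCompletion();

        // Option 2: the convenience overload blocks until the same LRO completes.
        clustersClient.delete("my-rg", "my-confluent-org", "env-00000", "lkc-00000");
    }
}
```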
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + Context context) { + deleteAsync(resourceGroupName, organizationName, environmentId, clusterId, context).block(); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ClustersImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ClustersImpl.java new file mode 100644 index 000000000000..f459a40a59a5 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ClustersImpl.java @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.util.Context; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.confluent.fluent.ClustersClient; +import com.azure.resourcemanager.confluent.models.Clusters; + +public final class ClustersImpl implements Clusters { + private static final ClientLogger LOGGER = new ClientLogger(ClustersImpl.class); + + private final ClustersClient innerClient; + + private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; + + public ClustersImpl(ClustersClient innerClient, + com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerClient = innerClient; + this.serviceManager = serviceManager; + } + + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId) { + this.serviceClient().delete(resourceGroupName, organizationName, environmentId, clusterId); + } + + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + Context context) { + this.serviceClient().delete(resourceGroupName, organizationName, environmentId, clusterId, context); + } + + public void deleteById(String id) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. 
Missing path segment 'clusters'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, clusterId, Context.NONE); + } + + public void deleteByIdWithResponse(String id, Context context) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, clusterId, context); + } + + private ClustersClient serviceClient() { + return this.innerClient; + } + + private com.azure.resourcemanager.confluent.ConfluentManager manager() { + return this.serviceManager; + } + + public SCClusterRecordImpl define(String name) { + return new SCClusterRecordImpl(name, this.manager()); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConfluentManagementClientImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConfluentManagementClientImpl.java index ef0ffbf9ae0c..e1f3d54ee2fe 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConfluentManagementClientImpl.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConfluentManagementClientImpl.java @@ -24,10 +24,14 @@ import com.azure.core.util.serializer.SerializerAdapter; import com.azure.core.util.serializer.SerializerEncoding; import com.azure.resourcemanager.confluent.fluent.AccessClient; +import com.azure.resourcemanager.confluent.fluent.ClustersClient; import com.azure.resourcemanager.confluent.fluent.ConfluentManagementClient; +import com.azure.resourcemanager.confluent.fluent.ConnectorsClient; +import com.azure.resourcemanager.confluent.fluent.EnvironmentsClient; import com.azure.resourcemanager.confluent.fluent.MarketplaceAgreementsClient; import com.azure.resourcemanager.confluent.fluent.OrganizationOperationsClient; import com.azure.resourcemanager.confluent.fluent.OrganizationsClient; +import com.azure.resourcemanager.confluent.fluent.TopicsClient; import com.azure.resourcemanager.confluent.fluent.ValidationsClient; import java.io.IOException; import java.lang.reflect.Type; @@ -197,6 +201,62 @@ public AccessClient getAccess() { return this.access; } + /** + * The EnvironmentsClient object to access its operations. 
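Because deleteById and deleteByIdWithResponse in ClustersImpl above recover every segment from the ARM resource ID, the ID must follow the same shape as the clusters route. An illustrative call, assuming the Clusters collection interface exposes deleteById as implemented here; the subscription and resource names are placeholders:

```java
import com.azure.resourcemanager.confluent.models.Clusters;

public final class ClustersDeleteByIdSample {
    static void deleteByResourceId(Clusters clusters) {
        // The ID mirrors the route used by the clusters operations:
        // /subscriptions/{sub}/resourceGroups/{rg}/providers/Microsoft.Confluent/
        //     organizations/{org}/environments/{env}/clusters/{cluster}
        String clusterResourceId = "/subscriptions/00000000-0000-0000-0000-000000000000"
            + "/resourceGroups/my-rg/providers/Microsoft.Confluent/organizations/my-confluent-org"
            + "/environments/env-00000/clusters/lkc-00000";
        clusters.deleteById(clusterResourceId);
    }
}
```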
+ */ + private final EnvironmentsClient environments; + + /** + * Gets the EnvironmentsClient object to access its operations. + * + * @return the EnvironmentsClient object. + */ + public EnvironmentsClient getEnvironments() { + return this.environments; + } + + /** + * The ClustersClient object to access its operations. + */ + private final ClustersClient clusters; + + /** + * Gets the ClustersClient object to access its operations. + * + * @return the ClustersClient object. + */ + public ClustersClient getClusters() { + return this.clusters; + } + + /** + * The ConnectorsClient object to access its operations. + */ + private final ConnectorsClient connectors; + + /** + * Gets the ConnectorsClient object to access its operations. + * + * @return the ConnectorsClient object. + */ + public ConnectorsClient getConnectors() { + return this.connectors; + } + + /** + * The TopicsClient object to access its operations. + */ + private final TopicsClient topics; + + /** + * Gets the TopicsClient object to access its operations. + * + * @return the TopicsClient object. + */ + public TopicsClient getTopics() { + return this.topics; + } + /** * Initializes an instance of ConfluentManagementClient client. * @@ -214,12 +274,16 @@ public AccessClient getAccess() { this.defaultPollInterval = defaultPollInterval; this.subscriptionId = subscriptionId; this.endpoint = endpoint; - this.apiVersion = "2024-02-13"; + this.apiVersion = "2024-07-01"; this.marketplaceAgreements = new MarketplaceAgreementsClientImpl(this); this.organizationOperations = new OrganizationOperationsClientImpl(this); this.organizations = new OrganizationsClientImpl(this); this.validations = new ValidationsClientImpl(this); this.access = new AccessClientImpl(this); + this.environments = new EnvironmentsClientImpl(this); + this.clusters = new ClustersClientImpl(this); + this.connectors = new ConnectorsClientImpl(this); + this.topics = new TopicsClientImpl(this); } /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorResourceImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorResourceImpl.java new file mode 100644 index 000000000000..391e0b4c4075 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorResourceImpl.java @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
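+
+// A minimal usage sketch of the operation groups wired up in this change. It assumes "manager" is a
+// ConfluentManager that has already been authenticated (for example via
+// ConfluentManager.authenticate(credential, profile)); the resource group, organization, environment and
+// cluster ids below are placeholders, not values taken from this change.
+//
+//     // List the connectors in a cluster through the new Connectors operation group.
+//     manager.connectors()
+//         .list("myResourceGroup", "myOrganization", "env-example", "lkc-example")
+//         .forEach(connector -> System.out.println(connector.name()));
+//
+//     // Delete a Confluent cluster; the generated client blocks until the long-running delete completes.
+//     manager.clusters().delete("myResourceGroup", "myOrganization", "env-example", "lkc-example");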
+ +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.management.SystemData; +import com.azure.core.util.Context; +import com.azure.resourcemanager.confluent.fluent.models.ConnectorResourceInner; +import com.azure.resourcemanager.confluent.models.ConnectorInfoBase; +import com.azure.resourcemanager.confluent.models.ConnectorResource; +import com.azure.resourcemanager.confluent.models.ConnectorServiceTypeInfoBase; +import com.azure.resourcemanager.confluent.models.PartnerInfoBase; + +public final class ConnectorResourceImpl + implements ConnectorResource, ConnectorResource.Definition, ConnectorResource.Update { + private ConnectorResourceInner innerObject; + + private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; + + public String id() { + return this.innerModel().id(); + } + + public String name() { + return this.innerModel().name(); + } + + public String type() { + return this.innerModel().type(); + } + + public SystemData systemData() { + return this.innerModel().systemData(); + } + + public ConnectorInfoBase connectorBasicInfo() { + return this.innerModel().connectorBasicInfo(); + } + + public ConnectorServiceTypeInfoBase connectorServiceTypeInfo() { + return this.innerModel().connectorServiceTypeInfo(); + } + + public PartnerInfoBase partnerConnectorInfo() { + return this.innerModel().partnerConnectorInfo(); + } + + public String resourceGroupName() { + return resourceGroupName; + } + + public ConnectorResourceInner innerModel() { + return this.innerObject; + } + + private com.azure.resourcemanager.confluent.ConfluentManager manager() { + return this.serviceManager; + } + + private String resourceGroupName; + + private String organizationName; + + private String environmentId; + + private String clusterId; + + private String connectorName; + + public ConnectorResourceImpl withExistingCluster(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + this.resourceGroupName = resourceGroupName; + this.organizationName = organizationName; + this.environmentId = environmentId; + this.clusterId = clusterId; + return this; + } + + public ConnectorResource create() { + this.innerObject = serviceManager.serviceClient() + .getConnectors() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, + this.innerModel(), Context.NONE) + .getValue(); + return this; + } + + public ConnectorResource create(Context context) { + this.innerObject = serviceManager.serviceClient() + .getConnectors() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, + this.innerModel(), context) + .getValue(); + return this; + } + + ConnectorResourceImpl(String name, com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = new ConnectorResourceInner(); + this.serviceManager = serviceManager; + this.connectorName = name; + } + + public ConnectorResourceImpl update() { + return this; + } + + public ConnectorResource apply() { + this.innerObject = serviceManager.serviceClient() + .getConnectors() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, + this.innerModel(), Context.NONE) + .getValue(); + return this; + } + + public ConnectorResource apply(Context context) { + this.innerObject = serviceManager.serviceClient() + .getConnectors() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, 
connectorName, + this.innerModel(), context) + .getValue(); + return this; + } + + ConnectorResourceImpl(ConnectorResourceInner innerObject, + com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = innerObject; + this.serviceManager = serviceManager; + this.resourceGroupName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "resourceGroups"); + this.organizationName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "organizations"); + this.environmentId = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "environments"); + this.clusterId = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "clusters"); + this.connectorName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "connectors"); + } + + public ConnectorResource refresh() { + this.innerObject = serviceManager.serviceClient() + .getConnectors() + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, Context.NONE) + .getValue(); + return this; + } + + public ConnectorResource refresh(Context context) { + this.innerObject = serviceManager.serviceClient() + .getConnectors() + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, context) + .getValue(); + return this; + } + + public ConnectorResourceImpl withConnectorBasicInfo(ConnectorInfoBase connectorBasicInfo) { + this.innerModel().withConnectorBasicInfo(connectorBasicInfo); + return this; + } + + public ConnectorResourceImpl withConnectorServiceTypeInfo(ConnectorServiceTypeInfoBase connectorServiceTypeInfo) { + this.innerModel().withConnectorServiceTypeInfo(connectorServiceTypeInfo); + return this; + } + + public ConnectorResourceImpl withPartnerConnectorInfo(PartnerInfoBase partnerConnectorInfo) { + this.innerModel().withPartnerConnectorInfo(partnerConnectorInfo); + return this; + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorsClientImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorsClientImpl.java new file mode 100644 index 000000000000..1201ea054c62 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorsClientImpl.java @@ -0,0 +1,987 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
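+
+// A minimal sketch of the fluent definition flow that the createOrUpdate operation in this client backs
+// (see ConnectorResourceImpl above). It assumes the Connectors collection exposes the usual generated
+// define(name) entry point, and that "connectorInfo" and "serviceTypeInfo" are model instances built
+// elsewhere (for example from KafkaAzureBlobStorageSinkConnectorInfo and
+// AzureBlobStorageSinkConnectorServiceInfo); all names and ids are placeholders.
+//
+//     ConnectorResource connector = manager.connectors()
+//         .define("my-blob-sink-connector")
+//         .withExistingCluster("myResourceGroup", "myOrganization", "env-example", "lkc-example")
+//         .withConnectorBasicInfo(connectorInfo)
+//         .withConnectorServiceTypeInfo(serviceTypeInfo)
+//         .create();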
+ +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.annotation.BodyParam; +import com.azure.core.annotation.Delete; +import com.azure.core.annotation.ExpectedResponses; +import com.azure.core.annotation.Get; +import com.azure.core.annotation.HeaderParam; +import com.azure.core.annotation.Headers; +import com.azure.core.annotation.Host; +import com.azure.core.annotation.HostParam; +import com.azure.core.annotation.PathParam; +import com.azure.core.annotation.Put; +import com.azure.core.annotation.QueryParam; +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceInterface; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.annotation.UnexpectedResponseExceptionType; +import com.azure.core.http.rest.PagedFlux; +import com.azure.core.http.rest.PagedIterable; +import com.azure.core.http.rest.PagedResponse; +import com.azure.core.http.rest.PagedResponseBase; +import com.azure.core.http.rest.Response; +import com.azure.core.http.rest.RestProxy; +import com.azure.core.management.exception.ManagementException; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.FluxUtil; +import com.azure.core.util.polling.PollerFlux; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.ConnectorsClient; +import com.azure.resourcemanager.confluent.fluent.models.ConnectorResourceInner; +import com.azure.resourcemanager.confluent.models.ListConnectorsSuccessResponse; +import java.nio.ByteBuffer; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * An instance of this class provides access to all the operations defined in ConnectorsClient. + */ +public final class ConnectorsClientImpl implements ConnectorsClient { + /** + * The proxy service used to perform REST calls. + */ + private final ConnectorsService service; + + /** + * The service client containing this operation class. + */ + private final ConfluentManagementClientImpl client; + + /** + * Initializes an instance of ConnectorsClientImpl. + * + * @param client the instance of the service client containing this operation class. + */ + ConnectorsClientImpl(ConfluentManagementClientImpl client) { + this.service + = RestProxy.create(ConnectorsService.class, client.getHttpPipeline(), client.getSerializerAdapter()); + this.client = client; + } + + /** + * The interface defining all the services for ConfluentManagementClientConnectors to be used by the proxy service + * to perform REST calls. 
+     */
+    @Host("{$host}")
+    @ServiceInterface(name = "ConfluentManagementC")
+    public interface ConnectorsService {
+        @Headers({ "Content-Type: application/json" })
+        @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/connectors/{connectorName}")
+        @ExpectedResponses({ 200 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<ConnectorResourceInner>> get(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @PathParam("connectorName") String connectorName,
+            @HeaderParam("Accept") String accept, Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/connectors/{connectorName}")
+        @ExpectedResponses({ 200, 201 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<ConnectorResourceInner>> createOrUpdate(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @PathParam("connectorName") String connectorName,
+            @BodyParam("application/json") ConnectorResourceInner body, @HeaderParam("Accept") String accept,
+            Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/connectors/{connectorName}")
+        @ExpectedResponses({ 202, 204 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<Flux<ByteBuffer>>> delete(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @PathParam("connectorName") String connectorName,
+            @HeaderParam("Accept") String accept, Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/connectors")
+        @ExpectedResponses({ 200 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<ListConnectorsSuccessResponse>> list(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @QueryParam("pageSize") Integer pageSize,
+            @QueryParam("pageToken") String pageToken, @HeaderParam("Accept") String accept, Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Get("{nextLink}")
+        @ExpectedResponses({ 200 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<ListConnectorsSuccessResponse>> listNext(
+            @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint,
+            @HeaderParam("Accept") String accept, Context context);
+    }
+
+    /**
+     * Get confluent connector by Name.
+     * 
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param connectorName Confluent connector name.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent connector by Name along with {@link Response} on successful completion of {@link Mono}.
+     */
+    @ServiceMethod(returns = ReturnType.SINGLE)
+    private Mono<Response<ConnectorResourceInner>> getWithResponseAsync(String resourceGroupName,
+        String organizationName, String environmentId, String clusterId, String connectorName) {
+        if (this.client.getEndpoint() == null) {
+            return Mono.error(
+                new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+        }
+        if (this.client.getSubscriptionId() == null) {
+            return Mono.error(new IllegalArgumentException(
+                "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+        }
+        if (resourceGroupName == null) {
+            return Mono
+                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+        }
+        if (organizationName == null) {
+            return Mono
+                .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null."));
+        }
+        if (environmentId == null) {
+            return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null."));
+        }
+        if (clusterId == null) {
+            return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null."));
+        }
+        if (connectorName == null) {
+            return Mono.error(new IllegalArgumentException("Parameter connectorName is required and cannot be null."));
+        }
+        final String accept = "application/json";
+        return FluxUtil
+            .withContext(context -> service.get(this.client.getEndpoint(), this.client.getApiVersion(),
+                this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId,
+                connectorName, accept, context))
+            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
+    }
+
+    /**
+     * Get confluent connector by Name.
+     * 
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param connectorName Confluent connector name.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return confluent connector by Name along with {@link Response} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> getWithResponseAsync(String resourceGroupName, + String organizationName, String environmentId, String clusterId, String connectorName, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (connectorName == null) { + return Mono.error(new IllegalArgumentException("Parameter connectorName is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.get(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, connectorName, accept, context); + } + + /** + * Get confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent connector by Name on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono getAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName) { + return getWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName) + .flatMap(res -> Mono.justOrEmpty(res.getValue())); + } + + /** + * Get confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent connector by Name along with {@link Response}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + public Response getWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, Context context) { + return getWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName, + context).block(); + } + + /** + * Get confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent connector by Name. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public ConnectorResourceInner get(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String connectorName) { + return getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, + Context.NONE).getValue(); + } + + /** + * Create confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param body Confluent Connector resource model. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of connector record along with {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> createOrUpdateWithResponseAsync(String resourceGroupName, + String organizationName, String environmentId, String clusterId, String connectorName, + ConnectorResourceInner body) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (connectorName == null) { + return Mono.error(new IllegalArgumentException("Parameter connectorName is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.createOrUpdate(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + connectorName, body, accept, context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Create confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param body Confluent Connector resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of connector record along with {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> createOrUpdateWithResponseAsync(String resourceGroupName, + String organizationName, String environmentId, String clusterId, String connectorName, + ConnectorResourceInner body, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (connectorName == null) { + return Mono.error(new IllegalArgumentException("Parameter connectorName is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.createOrUpdate(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + connectorName, body, accept, context); + } + + /** + * Create confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of connector record on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono createOrUpdateAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName) { + final ConnectorResourceInner body = null; + return createOrUpdateWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, + connectorName, body).flatMap(res -> Mono.justOrEmpty(res.getValue())); + } + + /** + * Create confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param body Confluent Connector resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. 
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of connector record along with {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public Response createOrUpdateWithResponse(String resourceGroupName, + String organizationName, String environmentId, String clusterId, String connectorName, + ConnectorResourceInner body, Context context) { + return createOrUpdateWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, + connectorName, body, context).block(); + } + + /** + * Create confluent connector by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of connector record. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public ConnectorResourceInner createOrUpdate(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName) { + final ConnectorResourceInner body = null; + return createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, + body, Context.NONE).getValue(); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono>> deleteWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (connectorName == null) { + return Mono.error(new IllegalArgumentException("Parameter connectorName is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.delete(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + connectorName, accept, context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono>> deleteWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (connectorName == null) { + return Mono.error(new IllegalArgumentException("Parameter connectorName is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.delete(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, connectorName, accept, context); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link PollerFlux} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + private PollerFlux, Void> beginDeleteAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName) { + Mono>> mono + = deleteWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName); + return this.client.getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, + this.client.getContext()); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
+ * @return the {@link PollerFlux} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + private PollerFlux, Void> beginDeleteAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, Context context) { + context = this.client.mergeContext(context); + Mono>> mono = deleteWithResponseAsync(resourceGroupName, organizationName, + environmentId, clusterId, connectorName, context); + return this.client.getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, + context); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName) { + return this.beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName) + .getSyncPoller(); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, Context context) { + return this + .beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName, context) + .getSyncPoller(); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String connectorName) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName).last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String connectorName, Context context) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName, context) + .last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String connectorName) { + deleteAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName).block(); + } + + /** + * Delete confluent connector by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param connectorName Confluent connector name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String connectorName, Context context) { + deleteAsync(resourceGroupName, organizationName, environmentId, clusterId, connectorName, context).block(); + } + + /** + * Lists all the connectors in a cluster. 
+ * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization along with + * {@link PagedResponse} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> listSinglePageAsync(String resourceGroupName, + String organizationName, String environmentId, String clusterId, Integer pageSize, String pageToken) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.list(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken, accept, context)) + .>map(res -> new PagedResponseBase<>(res.getRequest(), + res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization along with + * {@link PagedResponse} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> listSinglePageAsync(String resourceGroupName, + String organizationName, String environmentId, String clusterId, Integer pageSize, String pageToken, + Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service + .list(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken, accept, context) + .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), + res.getValue().value(), res.getValue().nextLink(), null)); + } + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization as paginated response + * with {@link PagedFlux}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + private PagedFlux listAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken) { + return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken), nextLink -> listNextSinglePageAsync(nextLink)); + } + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
+ * @return result of GET request to list connectors in the cluster of a confluent organization as paginated response + * with {@link PagedFlux}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + private PagedFlux listAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + final Integer pageSize = null; + final String pageToken = null; + return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken), nextLink -> listNextSinglePageAsync(nextLink)); + } + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization as paginated response + * with {@link PagedFlux}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + private PagedFlux listAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken, Context context) { + return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken, context), nextLink -> listNextSinglePageAsync(nextLink, context)); + } + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + public PagedIterable list(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + final Integer pageSize = null; + final String pageToken = null; + return new PagedIterable<>( + listAsync(resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken)); + } + + /** + * Lists all the connectors in a cluster. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. 
+ * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + public PagedIterable list(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken, Context context) { + return new PagedIterable<>( + listAsync(resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken, context)); + } + + /** + * Get the next page of items. + * + * @param nextLink The URL to get the next list of items. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization along with + * {@link PagedResponse} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> listNextSinglePageAsync(String nextLink) { + if (nextLink == null) { + return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); + } + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil.withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context)) + .>map(res -> new PagedResponseBase<>(res.getRequest(), + res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Get the next page of items. + * + * @param nextLink The URL to get the next list of items. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list connectors in the cluster of a confluent organization along with + * {@link PagedResponse} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> listNextSinglePageAsync(String nextLink, Context context) { + if (nextLink == null) { + return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); + } + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.listNext(nextLink, this.client.getEndpoint(), accept, context) + .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), + res.getValue().value(), res.getValue().nextLink(), null)); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorsImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorsImpl.java new file mode 100644 index 000000000000..68e42a162903 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/ConnectorsImpl.java @@ -0,0 +1,208 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.http.rest.PagedIterable; +import com.azure.core.http.rest.Response; +import com.azure.core.http.rest.SimpleResponse; +import com.azure.core.util.Context; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.confluent.fluent.ConnectorsClient; +import com.azure.resourcemanager.confluent.fluent.models.ConnectorResourceInner; +import com.azure.resourcemanager.confluent.models.ConnectorResource; +import com.azure.resourcemanager.confluent.models.Connectors; + +public final class ConnectorsImpl implements Connectors { + private static final ClientLogger LOGGER = new ClientLogger(ConnectorsImpl.class); + + private final ConnectorsClient innerClient; + + private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; + + public ConnectorsImpl(ConnectorsClient innerClient, + com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerClient = innerClient; + this.serviceManager = serviceManager; + } + + public Response getWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String connectorName, Context context) { + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, context); + if (inner != null) { + return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), + new ConnectorResourceImpl(inner.getValue(), this.manager())); + } else { + return null; + } + } + + public ConnectorResource get(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String connectorName) { + ConnectorResourceInner inner + = this.serviceClient().get(resourceGroupName, organizationName, environmentId, clusterId, connectorName); + if (inner != null) { + return new ConnectorResourceImpl(inner, this.manager()); + } else { + return null; + } + } + + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String 
connectorName) { + this.serviceClient().delete(resourceGroupName, organizationName, environmentId, clusterId, connectorName); + } + + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String connectorName, Context context) { + this.serviceClient() + .delete(resourceGroupName, organizationName, environmentId, clusterId, connectorName, context); + } + + public PagedIterable list(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + PagedIterable inner + = this.serviceClient().list(resourceGroupName, organizationName, environmentId, clusterId); + return ResourceManagerUtils.mapPage(inner, inner1 -> new ConnectorResourceImpl(inner1, this.manager())); + } + + public PagedIterable list(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken, Context context) { + PagedIterable inner = this.serviceClient() + .list(resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken, context); + return ResourceManagerUtils.mapPage(inner, inner1 -> new ConnectorResourceImpl(inner1, this.manager())); + } + + public ConnectorResource getById(String id) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String connectorName = ResourceManagerUtils.getValueFromIdByName(id, "connectors"); + if (connectorName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'connectors'.", id))); + } + return this + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, Context.NONE) + .getValue(); + } + + public Response getByIdWithResponse(String id, Context context) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. 
Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String connectorName = ResourceManagerUtils.getValueFromIdByName(id, "connectors"); + if (connectorName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'connectors'.", id))); + } + return this.getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, connectorName, + context); + } + + public void deleteById(String id) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String connectorName = ResourceManagerUtils.getValueFromIdByName(id, "connectors"); + if (connectorName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'connectors'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, clusterId, connectorName, Context.NONE); + } + + public void deleteByIdWithResponse(String id, Context context) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. 
Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String connectorName = ResourceManagerUtils.getValueFromIdByName(id, "connectors"); + if (connectorName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'connectors'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, clusterId, connectorName, context); + } + + private ConnectorsClient serviceClient() { + return this.innerClient; + } + + private com.azure.resourcemanager.confluent.ConfluentManager manager() { + return this.serviceManager; + } + + public ConnectorResourceImpl define(String name) { + return new ConnectorResourceImpl(name, this.manager()); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/EnvironmentsClientImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/EnvironmentsClientImpl.java new file mode 100644 index 000000000000..67161454ed81 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/EnvironmentsClientImpl.java @@ -0,0 +1,463 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.annotation.BodyParam; +import com.azure.core.annotation.Delete; +import com.azure.core.annotation.ExpectedResponses; +import com.azure.core.annotation.HeaderParam; +import com.azure.core.annotation.Headers; +import com.azure.core.annotation.Host; +import com.azure.core.annotation.HostParam; +import com.azure.core.annotation.PathParam; +import com.azure.core.annotation.Put; +import com.azure.core.annotation.QueryParam; +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceInterface; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.annotation.UnexpectedResponseExceptionType; +import com.azure.core.http.rest.Response; +import com.azure.core.http.rest.RestProxy; +import com.azure.core.management.exception.ManagementException; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.FluxUtil; +import com.azure.core.util.polling.PollerFlux; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.EnvironmentsClient; +import com.azure.resourcemanager.confluent.fluent.models.SCEnvironmentRecordInner; +import java.nio.ByteBuffer; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * An instance of this class provides access to all the operations defined in EnvironmentsClient. + */ +public final class EnvironmentsClientImpl implements EnvironmentsClient { + /** + * The proxy service used to perform REST calls. + */ + private final EnvironmentsService service; + + /** + * The service client containing this operation class. + */ + private final ConfluentManagementClientImpl client; + + /** + * Initializes an instance of EnvironmentsClientImpl. 
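// --- Editorial note (not part of the generated diff): the ID-based accessors in ConnectorsImpl
// above split a full ARM resource ID into its path segments before delegating to
// getWithResponse/delete. A hypothetical ID-based lookup, assuming a ConfluentManager named
// "manager" with a connectors() accessor; the resource ID below is illustrative only.
String connectorId = "/subscriptions/<subscription-id>/resourceGroups/my-rg/providers/Microsoft.Confluent"
    + "/organizations/my-confluent-org/environments/env-1/clusters/lkc-123/connectors/my-connector";
ConnectorResource connector = manager.connectors().getById(connectorId);
manager.connectors().deleteById(connectorId);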
+ * + * @param client the instance of the service client containing this operation class. + */ + EnvironmentsClientImpl(ConfluentManagementClientImpl client) { + this.service + = RestProxy.create(EnvironmentsService.class, client.getHttpPipeline(), client.getSerializerAdapter()); + this.client = client; + } + + /** + * The interface defining all the services for ConfluentManagementClientEnvironments to be used by the proxy service + * to perform REST calls. + */ + @Host("{$host}") + @ServiceInterface(name = "ConfluentManagementC") + public interface EnvironmentsService { + @Headers({ "Content-Type: application/json" }) + @Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}") + @ExpectedResponses({ 200, 201 }) + @UnexpectedResponseExceptionType(ManagementException.class) + Mono> createOrUpdate(@HostParam("$host") String endpoint, + @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, + @PathParam("resourceGroupName") String resourceGroupName, + @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId, + @BodyParam("application/json") SCEnvironmentRecordInner body, @HeaderParam("Accept") String accept, + Context context); + + @Headers({ "Content-Type: application/json" }) + @Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}") + @ExpectedResponses({ 202, 204 }) + @UnexpectedResponseExceptionType(ManagementException.class) + Mono>> delete(@HostParam("$host") String endpoint, + @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, + @PathParam("resourceGroupName") String resourceGroupName, + @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId, + @HeaderParam("Accept") String accept, Context context); + } + + /** + * Create confluent environment. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param body Confluent Environment resource model. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details about environment name, metadata and environment id of an environment along with {@link Response} + * on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> createOrUpdateWithResponseAsync(String resourceGroupName, + String organizationName, String environmentId, SCEnvironmentRecordInner body) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.createOrUpdate(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, body, accept, + context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Create confluent environment. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param body Confluent Environment resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details about environment name, metadata and environment id of an environment along with {@link Response} + * on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> createOrUpdateWithResponseAsync(String resourceGroupName, + String organizationName, String environmentId, SCEnvironmentRecordInner body, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.createOrUpdate(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, body, accept, context); + } + + /** + * Create confluent environment. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details about environment name, metadata and environment id of an environment on successful completion of + * {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono createOrUpdateAsync(String resourceGroupName, String organizationName, + String environmentId) { + final SCEnvironmentRecordInner body = null; + return createOrUpdateWithResponseAsync(resourceGroupName, organizationName, environmentId, body) + .flatMap(res -> Mono.justOrEmpty(res.getValue())); + } + + /** + * Create confluent environment. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param body Confluent Environment resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details about environment name, metadata and environment id of an environment along with + * {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public Response createOrUpdateWithResponse(String resourceGroupName, + String organizationName, String environmentId, SCEnvironmentRecordInner body, Context context) { + return createOrUpdateWithResponseAsync(resourceGroupName, organizationName, environmentId, body, context) + .block(); + } + + /** + * Create confluent environment. 
+ * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details about environment name, metadata and environment id of an environment. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public SCEnvironmentRecordInner createOrUpdate(String resourceGroupName, String organizationName, + String environmentId) { + final SCEnvironmentRecordInner body = null; + return createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, body, Context.NONE) + .getValue(); + } + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono>> deleteWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.delete(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, accept, context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. 
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(String resourceGroupName, String organizationName,
+ String environmentId, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (organizationName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null."));
+ }
+ if (environmentId == null) {
+ return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.delete(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(),
+ resourceGroupName, organizationName, environmentId, accept, context);
+ }
+
+ /**
+ * Delete confluent environment by id.
+ *
+ * @param resourceGroupName The name of the resource group. The name is case insensitive.
+ * @param organizationName Organization resource name.
+ * @param environmentId Confluent environment id.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the {@link PollerFlux} for polling of long-running operation.
+ */
+ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
+ private PollerFlux<PollResult<Void>, Void> beginDeleteAsync(String resourceGroupName, String organizationName,
+ String environmentId) {
+ Mono<Response<Flux<ByteBuffer>>> mono
+ = deleteWithResponseAsync(resourceGroupName, organizationName, environmentId);
+ return this.client.<Void, Void>getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class,
+ this.client.getContext());
+ }
+
+ /**
+ * Delete confluent environment by id.
+ *
+ * @param resourceGroupName The name of the resource group. The name is case insensitive.
+ * @param organizationName Organization resource name.
+ * @param environmentId Confluent environment id.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the {@link PollerFlux} for polling of long-running operation.
+ */
+ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
+ private PollerFlux<PollResult<Void>, Void> beginDeleteAsync(String resourceGroupName, String organizationName,
+ String environmentId, Context context) {
+ context = this.client.mergeContext(context);
+ Mono<Response<Flux<ByteBuffer>>> mono
+ = deleteWithResponseAsync(resourceGroupName, organizationName, environmentId, context);
+ return this.client.<Void, Void>getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class,
+ context);
+ }
+
+ /**
+ * Delete confluent environment by id.
+ *
+ * @param resourceGroupName The name of the resource group. The name is case insensitive.
+ * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId) { + return this.beginDeleteAsync(resourceGroupName, organizationName, environmentId).getSyncPoller(); + } + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, Context context) { + return this.beginDeleteAsync(resourceGroupName, organizationName, environmentId, context).getSyncPoller(); + } + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId).last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId, + Context context) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId, context).last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId) { + deleteAsync(resourceGroupName, organizationName, environmentId).block(); + } + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId, Context context) { + deleteAsync(resourceGroupName, organizationName, environmentId, context).block(); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/EnvironmentsImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/EnvironmentsImpl.java new file mode 100644 index 000000000000..966b95e897ea --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/EnvironmentsImpl.java @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
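// --- Editorial note (not part of the generated diff): a sketch of driving the EnvironmentsClient
// defined above, assuming an authenticated ConfluentManager named "manager"; resource names are
// placeholders. The synchronous delete(...) blocks on the same long-running operation that
// beginDelete(...) starts.
EnvironmentsClient environmentsClient = manager.serviceClient().getEnvironments();
SCEnvironmentRecordInner environment =
    environmentsClient.createOrUpdate("my-rg", "my-confluent-org", "env-1");
environmentsClient.beginDelete("my-rg", "my-confluent-org", "env-1").waitForCompletion();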
+ +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.util.Context; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.confluent.fluent.EnvironmentsClient; +import com.azure.resourcemanager.confluent.models.Environments; + +public final class EnvironmentsImpl implements Environments { + private static final ClientLogger LOGGER = new ClientLogger(EnvironmentsImpl.class); + + private final EnvironmentsClient innerClient; + + private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; + + public EnvironmentsImpl(EnvironmentsClient innerClient, + com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerClient = innerClient; + this.serviceManager = serviceManager; + } + + public void delete(String resourceGroupName, String organizationName, String environmentId) { + this.serviceClient().delete(resourceGroupName, organizationName, environmentId); + } + + public void delete(String resourceGroupName, String organizationName, String environmentId, Context context) { + this.serviceClient().delete(resourceGroupName, organizationName, environmentId, context); + } + + public void deleteById(String id) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, Context.NONE); + } + + public void deleteByIdWithResponse(String id, Context context) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. 
Missing path segment 'environments'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, context); + } + + private EnvironmentsClient serviceClient() { + return this.innerClient; + } + + private com.azure.resourcemanager.confluent.ConfluentManager manager() { + return this.serviceManager; + } + + public SCEnvironmentRecordImpl define(String name) { + return new SCEnvironmentRecordImpl(name, this.manager()); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCClusterRecordImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCClusterRecordImpl.java index 2264401bf9f1..e3b9a75d6a1d 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCClusterRecordImpl.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCClusterRecordImpl.java @@ -4,23 +4,18 @@ package com.azure.resourcemanager.confluent.implementation; +import com.azure.core.util.Context; import com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner; import com.azure.resourcemanager.confluent.models.ClusterStatusEntity; import com.azure.resourcemanager.confluent.models.SCClusterRecord; import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity; import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -public final class SCClusterRecordImpl implements SCClusterRecord { +public final class SCClusterRecordImpl implements SCClusterRecord, SCClusterRecord.Definition, SCClusterRecord.Update { private SCClusterRecordInner innerObject; private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; - SCClusterRecordImpl(SCClusterRecordInner innerObject, - com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { - this.innerObject = innerObject; - this.serviceManager = serviceManager; - } - public String kind() { return this.innerModel().kind(); } @@ -29,6 +24,10 @@ public String id() { return this.innerModel().id(); } + public String type() { + return this.innerModel().type(); + } + public String name() { return this.innerModel().name(); } @@ -45,6 +44,10 @@ public ClusterStatusEntity status() { return this.innerModel().status(); } + public String resourceGroupName() { + return resourceGroupName; + } + public SCClusterRecordInner innerModel() { return this.innerObject; } @@ -52,4 +55,106 @@ public SCClusterRecordInner innerModel() { private com.azure.resourcemanager.confluent.ConfluentManager manager() { return this.serviceManager; } + + private String resourceGroupName; + + private String organizationName; + + private String environmentId; + + private String clusterId; + + public SCClusterRecordImpl withExistingEnvironment(String resourceGroupName, String organizationName, + String environmentId) { + this.resourceGroupName = resourceGroupName; + this.organizationName = organizationName; + this.environmentId = environmentId; + return this; + } + + public SCClusterRecord create() { + this.innerObject = serviceManager.serviceClient() + .getClusters() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, + this.innerModel(), Context.NONE) + .getValue(); + return this; + } + + public SCClusterRecord create(Context context) { + this.innerObject = serviceManager.serviceClient() + .getClusters() + 
.createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, + this.innerModel(), context) + .getValue(); + return this; + } + + SCClusterRecordImpl(String name, com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = new SCClusterRecordInner(); + this.serviceManager = serviceManager; + this.clusterId = name; + } + + public SCClusterRecordImpl update() { + return this; + } + + public SCClusterRecord apply() { + this.innerObject = serviceManager.serviceClient() + .getClusters() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, + this.innerModel(), Context.NONE) + .getValue(); + return this; + } + + public SCClusterRecord apply(Context context) { + this.innerObject = serviceManager.serviceClient() + .getClusters() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, clusterId, + this.innerModel(), context) + .getValue(); + return this; + } + + SCClusterRecordImpl(SCClusterRecordInner innerObject, + com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = innerObject; + this.serviceManager = serviceManager; + this.resourceGroupName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "resourceGroups"); + this.organizationName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "organizations"); + this.environmentId = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "environments"); + this.clusterId = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "clusters"); + } + + public SCClusterRecordImpl withKind(String kind) { + this.innerModel().withKind(kind); + return this; + } + + public SCClusterRecordImpl withType(String type) { + this.innerModel().withType(type); + return this; + } + + public SCClusterRecordImpl withName(String name) { + this.innerModel().withName(name); + return this; + } + + public SCClusterRecordImpl withMetadata(SCMetadataEntity metadata) { + this.innerModel().withMetadata(metadata); + return this; + } + + public SCClusterRecordImpl withSpec(SCClusterSpecEntity spec) { + this.innerModel().withSpec(spec); + return this; + } + + public SCClusterRecordImpl withStatus(ClusterStatusEntity status) { + this.innerModel().withStatus(status); + return this; + } } diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCEnvironmentRecordImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCEnvironmentRecordImpl.java index 9910b0830246..50a40e132148 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCEnvironmentRecordImpl.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/SCEnvironmentRecordImpl.java @@ -4,21 +4,18 @@ package com.azure.resourcemanager.confluent.implementation; +import com.azure.core.util.Context; import com.azure.resourcemanager.confluent.fluent.models.SCEnvironmentRecordInner; import com.azure.resourcemanager.confluent.models.SCEnvironmentRecord; import com.azure.resourcemanager.confluent.models.SCMetadataEntity; +import com.azure.resourcemanager.confluent.models.StreamGovernanceConfig; -public final class SCEnvironmentRecordImpl implements SCEnvironmentRecord { +public final class SCEnvironmentRecordImpl + implements SCEnvironmentRecord, SCEnvironmentRecord.Definition, 
SCEnvironmentRecord.Update { private SCEnvironmentRecordInner innerObject; private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; - SCEnvironmentRecordImpl(SCEnvironmentRecordInner innerObject, - com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { - this.innerObject = innerObject; - this.serviceManager = serviceManager; - } - public String kind() { return this.innerModel().kind(); } @@ -27,14 +24,26 @@ public String id() { return this.innerModel().id(); } + public String type() { + return this.innerModel().type(); + } + public String name() { return this.innerModel().name(); } + public StreamGovernanceConfig streamGovernanceConfig() { + return this.innerModel().streamGovernanceConfig(); + } + public SCMetadataEntity metadata() { return this.innerModel().metadata(); } + public String resourceGroupName() { + return resourceGroupName; + } + public SCEnvironmentRecordInner innerModel() { return this.innerObject; } @@ -42,4 +51,94 @@ public SCEnvironmentRecordInner innerModel() { private com.azure.resourcemanager.confluent.ConfluentManager manager() { return this.serviceManager; } + + private String resourceGroupName; + + private String organizationName; + + private String environmentId; + + public SCEnvironmentRecordImpl withExistingOrganization(String resourceGroupName, String organizationName) { + this.resourceGroupName = resourceGroupName; + this.organizationName = organizationName; + return this; + } + + public SCEnvironmentRecord create() { + this.innerObject = serviceManager.serviceClient() + .getEnvironments() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, this.innerModel(), + Context.NONE) + .getValue(); + return this; + } + + public SCEnvironmentRecord create(Context context) { + this.innerObject = serviceManager.serviceClient() + .getEnvironments() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, this.innerModel(), context) + .getValue(); + return this; + } + + SCEnvironmentRecordImpl(String name, com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = new SCEnvironmentRecordInner(); + this.serviceManager = serviceManager; + this.environmentId = name; + } + + public SCEnvironmentRecordImpl update() { + return this; + } + + public SCEnvironmentRecord apply() { + this.innerObject = serviceManager.serviceClient() + .getEnvironments() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, this.innerModel(), + Context.NONE) + .getValue(); + return this; + } + + public SCEnvironmentRecord apply(Context context) { + this.innerObject = serviceManager.serviceClient() + .getEnvironments() + .createOrUpdateWithResponse(resourceGroupName, organizationName, environmentId, this.innerModel(), context) + .getValue(); + return this; + } + + SCEnvironmentRecordImpl(SCEnvironmentRecordInner innerObject, + com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = innerObject; + this.serviceManager = serviceManager; + this.resourceGroupName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "resourceGroups"); + this.organizationName = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "organizations"); + this.environmentId = ResourceManagerUtils.getValueFromIdByName(innerObject.id(), "environments"); + } + + public SCEnvironmentRecordImpl withKind(String kind) { + this.innerModel().withKind(kind); + return this; + } + + public SCEnvironmentRecordImpl withType(String type) { + 
this.innerModel().withType(type); + return this; + } + + public SCEnvironmentRecordImpl withName(String name) { + this.innerModel().withName(name); + return this; + } + + public SCEnvironmentRecordImpl withStreamGovernanceConfig(StreamGovernanceConfig streamGovernanceConfig) { + this.innerModel().withStreamGovernanceConfig(streamGovernanceConfig); + return this; + } + + public SCEnvironmentRecordImpl withMetadata(SCMetadataEntity metadata) { + this.innerModel().withMetadata(metadata); + return this; + } } diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicRecordImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicRecordImpl.java new file mode 100644 index 000000000000..ab7dc51f87e3 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicRecordImpl.java @@ -0,0 +1,190 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.util.Context; +import com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner; +import com.azure.resourcemanager.confluent.models.TopicMetadataEntity; +import com.azure.resourcemanager.confluent.models.TopicRecord; +import com.azure.resourcemanager.confluent.models.TopicsInputConfig; +import com.azure.resourcemanager.confluent.models.TopicsRelatedLink; +import java.util.Collections; +import java.util.List; + +public final class TopicRecordImpl implements TopicRecord, TopicRecord.Definition { + private TopicRecordInner innerObject; + + private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; + + TopicRecordImpl(TopicRecordInner innerObject, com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = innerObject; + this.serviceManager = serviceManager; + } + + public String id() { + return this.innerModel().id(); + } + + public String name() { + return this.innerModel().name(); + } + + public String type() { + return this.innerModel().type(); + } + + public String kind() { + return this.innerModel().kind(); + } + + public String topicId() { + return this.innerModel().topicId(); + } + + public TopicMetadataEntity metadata() { + return this.innerModel().metadata(); + } + + public TopicsRelatedLink partitions() { + return this.innerModel().partitions(); + } + + public TopicsRelatedLink configs() { + return this.innerModel().configs(); + } + + public List inputConfigs() { + List inner = this.innerModel().inputConfigs(); + if (inner != null) { + return Collections.unmodifiableList(inner); + } else { + return Collections.emptyList(); + } + } + + public TopicsRelatedLink partitionsReassignments() { + return this.innerModel().partitionsReassignments(); + } + + public String partitionsCount() { + return this.innerModel().partitionsCount(); + } + + public String replicationFactor() { + return this.innerModel().replicationFactor(); + } + + public TopicRecordInner innerModel() { + return this.innerObject; + } + + private com.azure.resourcemanager.confluent.ConfluentManager manager() { + return this.serviceManager; + } + + private String resourceGroupName; + + private String organizationName; + + private String environmentId; + + private String clusterId; + + private String topicName; + + public 
TopicRecordImpl withExistingCluster(String resourceGroupName, String organizationName, String environmentId, + String clusterId) { + this.resourceGroupName = resourceGroupName; + this.organizationName = organizationName; + this.environmentId = environmentId; + this.clusterId = clusterId; + return this; + } + + public TopicRecord create() { + this.innerObject = serviceManager.serviceClient() + .getTopics() + .createWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, + this.innerModel(), Context.NONE) + .getValue(); + return this; + } + + public TopicRecord create(Context context) { + this.innerObject = serviceManager.serviceClient() + .getTopics() + .createWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, + this.innerModel(), context) + .getValue(); + return this; + } + + TopicRecordImpl(String name, com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerObject = new TopicRecordInner(); + this.serviceManager = serviceManager; + this.topicName = name; + } + + public TopicRecord refresh() { + this.innerObject = serviceManager.serviceClient() + .getTopics() + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, Context.NONE) + .getValue(); + return this; + } + + public TopicRecord refresh(Context context) { + this.innerObject = serviceManager.serviceClient() + .getTopics() + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, context) + .getValue(); + return this; + } + + public TopicRecordImpl withKind(String kind) { + this.innerModel().withKind(kind); + return this; + } + + public TopicRecordImpl withTopicId(String topicId) { + this.innerModel().withTopicId(topicId); + return this; + } + + public TopicRecordImpl withMetadata(TopicMetadataEntity metadata) { + this.innerModel().withMetadata(metadata); + return this; + } + + public TopicRecordImpl withPartitions(TopicsRelatedLink partitions) { + this.innerModel().withPartitions(partitions); + return this; + } + + public TopicRecordImpl withConfigs(TopicsRelatedLink configs) { + this.innerModel().withConfigs(configs); + return this; + } + + public TopicRecordImpl withInputConfigs(List inputConfigs) { + this.innerModel().withInputConfigs(inputConfigs); + return this; + } + + public TopicRecordImpl withPartitionsReassignments(TopicsRelatedLink partitionsReassignments) { + this.innerModel().withPartitionsReassignments(partitionsReassignments); + return this; + } + + public TopicRecordImpl withPartitionsCount(String partitionsCount) { + this.innerModel().withPartitionsCount(partitionsCount); + return this; + } + + public TopicRecordImpl withReplicationFactor(String replicationFactor) { + this.innerModel().withReplicationFactor(replicationFactor); + return this; + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicsClientImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicsClientImpl.java new file mode 100644 index 000000000000..557154f274aa --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicsClientImpl.java @@ -0,0 +1,980 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
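// --- Editorial note (not part of the generated diff): TopicRecordImpl above backs a fluent
// definition flow. A sketch of creating a topic with it, assuming the manager exposes a
// topics().define(...) entry point and that the with* setters shown belong to the definition
// stages; names and counts are placeholders.
TopicRecord topic = manager.topics()
    .define("orders")
    .withExistingCluster("my-rg", "my-confluent-org", "env-1", "lkc-123")
    .withPartitionsCount("3")
    .withReplicationFactor("3")
    .create();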
+ +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.annotation.BodyParam; +import com.azure.core.annotation.Delete; +import com.azure.core.annotation.ExpectedResponses; +import com.azure.core.annotation.Get; +import com.azure.core.annotation.HeaderParam; +import com.azure.core.annotation.Headers; +import com.azure.core.annotation.Host; +import com.azure.core.annotation.HostParam; +import com.azure.core.annotation.PathParam; +import com.azure.core.annotation.Put; +import com.azure.core.annotation.QueryParam; +import com.azure.core.annotation.ReturnType; +import com.azure.core.annotation.ServiceInterface; +import com.azure.core.annotation.ServiceMethod; +import com.azure.core.annotation.UnexpectedResponseExceptionType; +import com.azure.core.http.rest.PagedFlux; +import com.azure.core.http.rest.PagedIterable; +import com.azure.core.http.rest.PagedResponse; +import com.azure.core.http.rest.PagedResponseBase; +import com.azure.core.http.rest.Response; +import com.azure.core.http.rest.RestProxy; +import com.azure.core.management.exception.ManagementException; +import com.azure.core.management.polling.PollResult; +import com.azure.core.util.Context; +import com.azure.core.util.FluxUtil; +import com.azure.core.util.polling.PollerFlux; +import com.azure.core.util.polling.SyncPoller; +import com.azure.resourcemanager.confluent.fluent.TopicsClient; +import com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner; +import com.azure.resourcemanager.confluent.models.ListTopicsSuccessResponse; +import java.nio.ByteBuffer; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +/** + * An instance of this class provides access to all the operations defined in TopicsClient. + */ +public final class TopicsClientImpl implements TopicsClient { + /** + * The proxy service used to perform REST calls. + */ + private final TopicsService service; + + /** + * The service client containing this operation class. + */ + private final ConfluentManagementClientImpl client; + + /** + * Initializes an instance of TopicsClientImpl. + * + * @param client the instance of the service client containing this operation class. + */ + TopicsClientImpl(ConfluentManagementClientImpl client) { + this.service = RestProxy.create(TopicsService.class, client.getHttpPipeline(), client.getSerializerAdapter()); + this.client = client; + } + + /** + * The interface defining all the services for ConfluentManagementClientTopics to be used by the proxy service to + * perform REST calls. 
+     */
+    @Host("{$host}")
+    @ServiceInterface(name = "ConfluentManagementC")
+    public interface TopicsService {
+        @Headers({ "Content-Type: application/json" })
+        @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/topics")
+        @ExpectedResponses({ 200 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<ListTopicsSuccessResponse>> list(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @QueryParam("pageSize") Integer pageSize,
+            @QueryParam("pageToken") String pageToken, @HeaderParam("Accept") String accept, Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/topics/{topicName}")
+        @ExpectedResponses({ 200 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<TopicRecordInner>> get(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @PathParam("topicName") String topicName,
+            @HeaderParam("Accept") String accept, Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/topics/{topicName}")
+        @ExpectedResponses({ 200, 201 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<TopicRecordInner>> create(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @PathParam("topicName") String topicName,
+            @BodyParam("application/json") TopicRecordInner body, @HeaderParam("Accept") String accept,
+            Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Confluent/organizations/{organizationName}/environments/{environmentId}/clusters/{clusterId}/topics/{topicName}")
+        @ExpectedResponses({ 202, 204 })
+        @UnexpectedResponseExceptionType(ManagementException.class)
+        Mono<Response<Flux<ByteBuffer>>> delete(@HostParam("$host") String endpoint,
+            @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId,
+            @PathParam("resourceGroupName") String resourceGroupName,
+            @PathParam("organizationName") String organizationName, @PathParam("environmentId") String environmentId,
+            @PathParam("clusterId") String clusterId, @PathParam("topicName") String topicName,
+            @HeaderParam("Accept") String accept, Context context);
+
+        @Headers({ "Content-Type: application/json" })
+        @Get("{nextLink}")
+
@ExpectedResponses({ 200 }) + @UnexpectedResponseExceptionType(ManagementException.class) + Mono> listNext( + @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint, + @HeaderParam("Accept") String accept, Context context); + } + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization along with + * {@link PagedResponse} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> listSinglePageAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.list(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken, accept, context)) + .>map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), + res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. 
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization along with + * {@link PagedResponse} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> listSinglePageAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service + .list(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken, accept, context) + .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), + res.getValue().value(), res.getValue().nextLink(), null)); + } + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization as paginated response + * with {@link PagedFlux}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + private PagedFlux listAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken) { + return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken), nextLink -> listNextSinglePageAsync(nextLink)); + } + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. 
+ * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization as paginated response + * with {@link PagedFlux}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + private PagedFlux listAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId) { + final Integer pageSize = null; + final String pageToken = null; + return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken), nextLink -> listNextSinglePageAsync(nextLink)); + } + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization as paginated response + * with {@link PagedFlux}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + private PagedFlux listAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, Integer pageSize, String pageToken, Context context) { + return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, organizationName, environmentId, clusterId, + pageSize, pageToken, context), nextLink -> listNextSinglePageAsync(nextLink, context)); + } + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + public PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId) { + final Integer pageSize = null; + final String pageToken = null; + return new PagedIterable<>( + listAsync(resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken)); + } + + /** + * Lists of all the topics in a clusters. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. 
+ * @param clusterId Confluent kafka or schema registry cluster id. + * @param pageSize Pagination size. + * @param pageToken An opaque pagination token to fetch the next set of records. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return result of GET request to list topics in the cluster of a confluent organization as paginated response + * with {@link PagedIterable}. + */ + @ServiceMethod(returns = ReturnType.COLLECTION) + public PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId, Integer pageSize, String pageToken, Context context) { + return new PagedIterable<>( + listAsync(resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken, context)); + } + + /** + * Get confluent topic by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent topic by Name along with {@link Response} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> getWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (topicName == null) { + return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.get(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + topicName, accept, context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Get confluent topic by Name. + * + * @param resourceGroupName The name of the resource group. 
The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent topic by Name along with {@link Response} on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> getWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (topicName == null) { + return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.get(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, topicName, accept, context); + } + + /** + * Get confluent topic by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent topic by Name on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono getAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String topicName) { + return getWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName) + .flatMap(res -> Mono.justOrEmpty(res.getValue())); + } + + /** + * Get confluent topic by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. 
+ * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent topic by Name along with {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public Response getWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, Context context) { + return getWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName, context) + .block(); + } + + /** + * Get confluent topic by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return confluent topic by Name. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public TopicRecordInner get(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String topicName) { + return getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, Context.NONE) + .getValue(); + } + + /** + * Create confluent topics by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param body Confluent Topics resource model. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of topic record along with {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> createWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, TopicRecordInner body) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (topicName == null) { + return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.create(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + topicName, body, accept, context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Create confluent topics by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param body Confluent Topics resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of topic record along with {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono> createWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, TopicRecordInner body, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (topicName == null) { + return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null.")); + } + if (body != null) { + body.validate(); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.create(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, topicName, body, accept, context); + } + + /** + * Create confluent topics by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of topic record on successful completion of {@link Mono}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono createAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String topicName) { + final TopicRecordInner body = null; + return createWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName, body) + .flatMap(res -> Mono.justOrEmpty(res.getValue())); + } + + /** + * Create confluent topics by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param body Confluent Topics resource model. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
+ * @return details of topic record along with {@link Response}. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public Response createWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, TopicRecordInner body, Context context) { + return createWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName, body, + context).block(); + } + + /** + * Create confluent topics by Name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return details of topic record. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public TopicRecordInner create(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String topicName) { + final TopicRecordInner body = null; + return createWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, body, + Context.NONE).getValue(); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono>> deleteWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (topicName == null) { + return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null.")); + } + final String accept = "application/json"; + return FluxUtil + .withContext(context -> service.delete(this.client.getEndpoint(), this.client.getApiVersion(), + this.client.getSubscriptionId(), resourceGroupName, organizationName, environmentId, clusterId, + topicName, accept, context)) + .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link Response} on successful completion of {@link Mono}. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono>> deleteWithResponseAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, Context context) { + if (this.client.getEndpoint() == null) { + return Mono.error( + new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null.")); + } + if (this.client.getSubscriptionId() == null) { + return Mono.error(new IllegalArgumentException( + "Parameter this.client.getSubscriptionId() is required and cannot be null.")); + } + if (resourceGroupName == null) { + return Mono + .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); + } + if (organizationName == null) { + return Mono + .error(new IllegalArgumentException("Parameter organizationName is required and cannot be null.")); + } + if (environmentId == null) { + return Mono.error(new IllegalArgumentException("Parameter environmentId is required and cannot be null.")); + } + if (clusterId == null) { + return Mono.error(new IllegalArgumentException("Parameter clusterId is required and cannot be null.")); + } + if (topicName == null) { + return Mono.error(new IllegalArgumentException("Parameter topicName is required and cannot be null.")); + } + final String accept = "application/json"; + context = this.client.mergeContext(context); + return service.delete(this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), + resourceGroupName, organizationName, environmentId, clusterId, topicName, accept, context); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link PollerFlux} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + private PollerFlux, Void> beginDeleteAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName) { + Mono>> mono + = deleteWithResponseAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName); + return this.client.getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, + this.client.getContext()); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
+ * @return the {@link PollerFlux} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + private PollerFlux, Void> beginDeleteAsync(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, Context context) { + context = this.client.mergeContext(context); + Mono>> mono = deleteWithResponseAsync(resourceGroupName, organizationName, + environmentId, clusterId, topicName, context); + return this.client.getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, + context); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName) { + return this.beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName) + .getSyncPoller(); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return the {@link SyncPoller} for polling of long-running operation. + */ + @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) + public SyncPoller, Void> beginDelete(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, Context context) { + return this.beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName, context) + .getSyncPoller(); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. 
+ */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String topicName) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName).last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + * @return A {@link Mono} that completes when a successful response is received. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + private Mono deleteAsync(String resourceGroupName, String organizationName, String environmentId, + String clusterId, String topicName, Context context) { + return beginDeleteAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName, context) + .last() + .flatMap(this.client::getLroFinalResultOrError); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName) { + deleteAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName).block(); + } + + /** + * Delete confluent topic by name. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param topicName Confluent kafka or schema registry topic name. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + @ServiceMethod(returns = ReturnType.SINGLE) + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName, Context context) { + deleteAsync(resourceGroupName, organizationName, environmentId, clusterId, topicName, context).block(); + } + + /** + * Get the next page of items. 
+     *
+     * @param nextLink The URL to get the next list of items.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return result of GET request to list topics in the cluster of a confluent organization along with
+     * {@link PagedResponse} on successful completion of {@link Mono}.
+     */
+    @ServiceMethod(returns = ReturnType.SINGLE)
+    private Mono<PagedResponse<TopicRecordInner>> listNextSinglePageAsync(String nextLink) {
+        if (nextLink == null) {
+            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
+        }
+        if (this.client.getEndpoint() == null) {
+            return Mono.error(
+                new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+        }
+        final String accept = "application/json";
+        return FluxUtil.withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
+            .<PagedResponse<TopicRecordInner>>map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(),
+                res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null))
+            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
+    }
+
+    /**
+     * Get the next page of items.
+     *
+     * @param nextLink The URL to get the next list of items.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return result of GET request to list topics in the cluster of a confluent organization along with
+     * {@link PagedResponse} on successful completion of {@link Mono}.
+     */
+    @ServiceMethod(returns = ReturnType.SINGLE)
+    private Mono<PagedResponse<TopicRecordInner>> listNextSinglePageAsync(String nextLink, Context context) {
+        if (nextLink == null) {
+            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
+        }
+        if (this.client.getEndpoint() == null) {
+            return Mono.error(
+                new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+        }
+        final String accept = "application/json";
+        context = this.client.mergeContext(context);
+        return service.listNext(nextLink, this.client.getEndpoint(), accept, context)
+            .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
+                res.getValue().value(), res.getValue().nextLink(), null));
+    }
+}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicsImpl.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicsImpl.java
new file mode 100644
index 000000000000..5348d2bca4b7
--- /dev/null
+++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/implementation/TopicsImpl.java
@@ -0,0 +1,205 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
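+// NOTE (illustrative only, not generated code): TopicsImpl backs the Topics collection, assumed
+// here to be reached as manager.topics() on an authenticated ConfluentManager named "manager".
+// A minimal sketch with hypothetical resource names:
+//
+//     for (TopicRecord topic : manager.topics().list("my-rg", "my-org", "env-1", "lkc-123")) {
+//         // inspect each TopicRecord here
+//     }
+//
+//     TopicRecord byName = manager.topics().get("my-rg", "my-org", "env-1", "lkc-123", "orders");
+//     manager.topics().delete("my-rg", "my-org", "env-1", "lkc-123", "orders");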
+ +package com.azure.resourcemanager.confluent.implementation; + +import com.azure.core.http.rest.PagedIterable; +import com.azure.core.http.rest.Response; +import com.azure.core.http.rest.SimpleResponse; +import com.azure.core.util.Context; +import com.azure.core.util.logging.ClientLogger; +import com.azure.resourcemanager.confluent.fluent.TopicsClient; +import com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner; +import com.azure.resourcemanager.confluent.models.TopicRecord; +import com.azure.resourcemanager.confluent.models.Topics; + +public final class TopicsImpl implements Topics { + private static final ClientLogger LOGGER = new ClientLogger(TopicsImpl.class); + + private final TopicsClient innerClient; + + private final com.azure.resourcemanager.confluent.ConfluentManager serviceManager; + + public TopicsImpl(TopicsClient innerClient, com.azure.resourcemanager.confluent.ConfluentManager serviceManager) { + this.innerClient = innerClient; + this.serviceManager = serviceManager; + } + + public PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId) { + PagedIterable inner + = this.serviceClient().list(resourceGroupName, organizationName, environmentId, clusterId); + return ResourceManagerUtils.mapPage(inner, inner1 -> new TopicRecordImpl(inner1, this.manager())); + } + + public PagedIterable list(String resourceGroupName, String organizationName, String environmentId, + String clusterId, Integer pageSize, String pageToken, Context context) { + PagedIterable inner = this.serviceClient() + .list(resourceGroupName, organizationName, environmentId, clusterId, pageSize, pageToken, context); + return ResourceManagerUtils.mapPage(inner, inner1 -> new TopicRecordImpl(inner1, this.manager())); + } + + public Response getWithResponse(String resourceGroupName, String organizationName, + String environmentId, String clusterId, String topicName, Context context) { + Response inner = this.serviceClient() + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, context); + if (inner != null) { + return new SimpleResponse<>(inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), + new TopicRecordImpl(inner.getValue(), this.manager())); + } else { + return null; + } + } + + public TopicRecord get(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName) { + TopicRecordInner inner + = this.serviceClient().get(resourceGroupName, organizationName, environmentId, clusterId, topicName); + if (inner != null) { + return new TopicRecordImpl(inner, this.manager()); + } else { + return null; + } + } + + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName) { + this.serviceClient().delete(resourceGroupName, organizationName, environmentId, clusterId, topicName); + } + + public void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + String topicName, Context context) { + this.serviceClient().delete(resourceGroupName, organizationName, environmentId, clusterId, topicName, context); + } + + public TopicRecord getById(String id) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. 
Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String topicName = ResourceManagerUtils.getValueFromIdByName(id, "topics"); + if (topicName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'topics'.", id))); + } + return this + .getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, Context.NONE) + .getValue(); + } + + public Response getByIdWithResponse(String id, Context context) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String topicName = ResourceManagerUtils.getValueFromIdByName(id, "topics"); + if (topicName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'topics'.", id))); + } + return this.getWithResponse(resourceGroupName, organizationName, environmentId, clusterId, topicName, context); + } + + public void deleteById(String id) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. 
Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String topicName = ResourceManagerUtils.getValueFromIdByName(id, "topics"); + if (topicName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'topics'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, clusterId, topicName, Context.NONE); + } + + public void deleteByIdWithResponse(String id, Context context) { + String resourceGroupName = ResourceManagerUtils.getValueFromIdByName(id, "resourceGroups"); + if (resourceGroupName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id))); + } + String organizationName = ResourceManagerUtils.getValueFromIdByName(id, "organizations"); + if (organizationName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'organizations'.", id))); + } + String environmentId = ResourceManagerUtils.getValueFromIdByName(id, "environments"); + if (environmentId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'environments'.", id))); + } + String clusterId = ResourceManagerUtils.getValueFromIdByName(id, "clusters"); + if (clusterId == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'clusters'.", id))); + } + String topicName = ResourceManagerUtils.getValueFromIdByName(id, "topics"); + if (topicName == null) { + throw LOGGER.logExceptionAsError(new IllegalArgumentException( + String.format("The resource ID '%s' is not valid. Missing path segment 'topics'.", id))); + } + this.delete(resourceGroupName, organizationName, environmentId, clusterId, topicName, context); + } + + private TopicsClient serviceClient() { + return this.innerClient; + } + + private com.azure.resourcemanager.confluent.ConfluentManager manager() { + return this.serviceManager; + } + + public TopicRecordImpl define(String name) { + return new TopicRecordImpl(name, this.manager()); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AuthType.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AuthType.java new file mode 100644 index 000000000000..8fd46218aaf9 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AuthType.java @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
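+// NOTE (illustrative only, not generated code): AuthType is an expandable string enum, so values
+// beyond the constants defined below are still accepted and round-tripped. A minimal sketch:
+//
+//     AuthType serviceAccount = AuthType.SERVICE_ACCOUNT;
+//     AuthType parsed = AuthType.fromString("KAFKA_API_KEY");      // resolves to the known constant
+//     AuthType futureValue = AuthType.fromString("SOME_NEW_TYPE"); // hypothetical value, still representable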
+ +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.ExpandableStringEnum; +import java.util.Collection; + +/** + * authType + * + * Kafka Connector Auth Type. + */ +public final class AuthType extends ExpandableStringEnum { + /** + * Static value SERVICE_ACCOUNT for AuthType. + */ + public static final AuthType SERVICE_ACCOUNT = fromString("SERVICE_ACCOUNT"); + + /** + * Static value KAFKA_API_KEY for AuthType. + */ + public static final AuthType KAFKA_API_KEY = fromString("KAFKA_API_KEY"); + + /** + * Creates a new instance of AuthType value. + * + * @deprecated Use the {@link #fromString(String)} factory method. + */ + @Deprecated + public AuthType() { + } + + /** + * Creates or finds a AuthType from its string representation. + * + * @param name a name to look for. + * @return the corresponding AuthType. + */ + public static AuthType fromString(String name) { + return fromString(name, AuthType.class); + } + + /** + * Gets known AuthType values. + * + * @return known AuthType values. + */ + public static Collection values() { + return values(AuthType.class); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureBlobStorageSinkConnectorServiceInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureBlobStorageSinkConnectorServiceInfo.java new file mode 100644 index 000000000000..71b3858873f6 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureBlobStorageSinkConnectorServiceInfo.java @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The authentication info when auth_type is azureBlobStorageSinkConnector. + */ +@Fluent +public final class AzureBlobStorageSinkConnectorServiceInfo extends ConnectorServiceTypeInfoBase { + /* + * The connector service type. + */ + private ConnectorServiceType connectorServiceType = ConnectorServiceType.AZURE_BLOB_STORAGE_SINK_CONNECTOR; + + /* + * Azure Blob Storage Account Name + */ + private String storageAccountName; + + /* + * Azure Blob Storage Account Key + */ + private String storageAccountKey; + + /* + * Azure Blob Storage Account Container Name + */ + private String storageContainerName; + + /** + * Creates an instance of AzureBlobStorageSinkConnectorServiceInfo class. + */ + public AzureBlobStorageSinkConnectorServiceInfo() { + } + + /** + * Get the connectorServiceType property: The connector service type. + * + * @return the connectorServiceType value. + */ + @Override + public ConnectorServiceType connectorServiceType() { + return this.connectorServiceType; + } + + /** + * Get the storageAccountName property: Azure Blob Storage Account Name. + * + * @return the storageAccountName value. + */ + public String storageAccountName() { + return this.storageAccountName; + } + + /** + * Set the storageAccountName property: Azure Blob Storage Account Name. + * + * @param storageAccountName the storageAccountName value to set. + * @return the AzureBlobStorageSinkConnectorServiceInfo object itself. 
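AuthType, like the other expandable enums added in this change (ConnectorClass, ConnectorServiceType, ConnectorStatus), follows the azure-core ExpandableStringEnum pattern: unknown strings are accepted and cached rather than rejected. A small sketch of that behavior:

```java
AuthType known = AuthType.fromString("SERVICE_ACCOUNT");    // resolves to the existing AuthType.SERVICE_ACCOUNT constant
AuthType future = AuthType.fromString("SOME_FUTURE_VALUE"); // unknown values are accepted, not rejected
java.util.Collection<AuthType> seen = AuthType.values();    // every AuthType created so far, including both of the above
```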
+ */ + public AzureBlobStorageSinkConnectorServiceInfo withStorageAccountName(String storageAccountName) { + this.storageAccountName = storageAccountName; + return this; + } + + /** + * Get the storageAccountKey property: Azure Blob Storage Account Key. + * + * @return the storageAccountKey value. + */ + public String storageAccountKey() { + return this.storageAccountKey; + } + + /** + * Set the storageAccountKey property: Azure Blob Storage Account Key. + * + * @param storageAccountKey the storageAccountKey value to set. + * @return the AzureBlobStorageSinkConnectorServiceInfo object itself. + */ + public AzureBlobStorageSinkConnectorServiceInfo withStorageAccountKey(String storageAccountKey) { + this.storageAccountKey = storageAccountKey; + return this; + } + + /** + * Get the storageContainerName property: Azure Blob Storage Account Container Name. + * + * @return the storageContainerName value. + */ + public String storageContainerName() { + return this.storageContainerName; + } + + /** + * Set the storageContainerName property: Azure Blob Storage Account Container Name. + * + * @param storageContainerName the storageContainerName value to set. + * @return the AzureBlobStorageSinkConnectorServiceInfo object itself. + */ + public AzureBlobStorageSinkConnectorServiceInfo withStorageContainerName(String storageContainerName) { + this.storageContainerName = storageContainerName; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectorServiceType", + this.connectorServiceType == null ? null : this.connectorServiceType.toString()); + jsonWriter.writeStringField("storageAccountName", this.storageAccountName); + jsonWriter.writeStringField("storageAccountKey", this.storageAccountKey); + jsonWriter.writeStringField("storageContainerName", this.storageContainerName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobStorageSinkConnectorServiceInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobStorageSinkConnectorServiceInfo if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobStorageSinkConnectorServiceInfo. 
+ */ + public static AzureBlobStorageSinkConnectorServiceInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobStorageSinkConnectorServiceInfo deserializedAzureBlobStorageSinkConnectorServiceInfo + = new AzureBlobStorageSinkConnectorServiceInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorServiceType".equals(fieldName)) { + deserializedAzureBlobStorageSinkConnectorServiceInfo.connectorServiceType + = ConnectorServiceType.fromString(reader.getString()); + } else if ("storageAccountName".equals(fieldName)) { + deserializedAzureBlobStorageSinkConnectorServiceInfo.storageAccountName = reader.getString(); + } else if ("storageAccountKey".equals(fieldName)) { + deserializedAzureBlobStorageSinkConnectorServiceInfo.storageAccountKey = reader.getString(); + } else if ("storageContainerName".equals(fieldName)) { + deserializedAzureBlobStorageSinkConnectorServiceInfo.storageContainerName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureBlobStorageSinkConnectorServiceInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureBlobStorageSourceConnectorServiceInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureBlobStorageSourceConnectorServiceInfo.java new file mode 100644 index 000000000000..51c4429085ed --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureBlobStorageSourceConnectorServiceInfo.java @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The connector service type is AzureBlobStorageSourceConnector. + */ +@Fluent +public final class AzureBlobStorageSourceConnectorServiceInfo extends ConnectorServiceTypeInfoBase { + /* + * The connector service type. + */ + private ConnectorServiceType connectorServiceType = ConnectorServiceType.AZURE_BLOB_STORAGE_SOURCE_CONNECTOR; + + /* + * Azure Blob Storage Account Name + */ + private String storageAccountName; + + /* + * Azure Blob Storage Account Key + */ + private String storageAccountKey; + + /* + * Azure Blob Storage Account Container Name + */ + private String storageContainerName; + + /** + * Creates an instance of AzureBlobStorageSourceConnectorServiceInfo class. + */ + public AzureBlobStorageSourceConnectorServiceInfo() { + } + + /** + * Get the connectorServiceType property: The connector service type. + * + * @return the connectorServiceType value. + */ + @Override + public ConnectorServiceType connectorServiceType() { + return this.connectorServiceType; + } + + /** + * Get the storageAccountName property: Azure Blob Storage Account Name. + * + * @return the storageAccountName value. + */ + public String storageAccountName() { + return this.storageAccountName; + } + + /** + * Set the storageAccountName property: Azure Blob Storage Account Name. + * + * @param storageAccountName the storageAccountName value to set. 
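The sink service info above is a plain fluent model; a minimal construction sketch using only the setters defined in this file, with placeholder values:

```java
AzureBlobStorageSinkConnectorServiceInfo sinkInfo = new AzureBlobStorageSinkConnectorServiceInfo()
    .withStorageAccountName("mystorageaccount")           // placeholder storage account
    .withStorageAccountKey("<storage-account-key>")       // placeholder secret
    .withStorageContainerName("confluent-sink-container");
// connectorServiceType() is fixed by the model to ConnectorServiceType.AZURE_BLOB_STORAGE_SINK_CONNECTOR.
```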
+ * @return the AzureBlobStorageSourceConnectorServiceInfo object itself. + */ + public AzureBlobStorageSourceConnectorServiceInfo withStorageAccountName(String storageAccountName) { + this.storageAccountName = storageAccountName; + return this; + } + + /** + * Get the storageAccountKey property: Azure Blob Storage Account Key. + * + * @return the storageAccountKey value. + */ + public String storageAccountKey() { + return this.storageAccountKey; + } + + /** + * Set the storageAccountKey property: Azure Blob Storage Account Key. + * + * @param storageAccountKey the storageAccountKey value to set. + * @return the AzureBlobStorageSourceConnectorServiceInfo object itself. + */ + public AzureBlobStorageSourceConnectorServiceInfo withStorageAccountKey(String storageAccountKey) { + this.storageAccountKey = storageAccountKey; + return this; + } + + /** + * Get the storageContainerName property: Azure Blob Storage Account Container Name. + * + * @return the storageContainerName value. + */ + public String storageContainerName() { + return this.storageContainerName; + } + + /** + * Set the storageContainerName property: Azure Blob Storage Account Container Name. + * + * @param storageContainerName the storageContainerName value to set. + * @return the AzureBlobStorageSourceConnectorServiceInfo object itself. + */ + public AzureBlobStorageSourceConnectorServiceInfo withStorageContainerName(String storageContainerName) { + this.storageContainerName = storageContainerName; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectorServiceType", + this.connectorServiceType == null ? null : this.connectorServiceType.toString()); + jsonWriter.writeStringField("storageAccountName", this.storageAccountName); + jsonWriter.writeStringField("storageAccountKey", this.storageAccountKey); + jsonWriter.writeStringField("storageContainerName", this.storageContainerName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureBlobStorageSourceConnectorServiceInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureBlobStorageSourceConnectorServiceInfo if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureBlobStorageSourceConnectorServiceInfo. 
+ */ + public static AzureBlobStorageSourceConnectorServiceInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureBlobStorageSourceConnectorServiceInfo deserializedAzureBlobStorageSourceConnectorServiceInfo + = new AzureBlobStorageSourceConnectorServiceInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorServiceType".equals(fieldName)) { + deserializedAzureBlobStorageSourceConnectorServiceInfo.connectorServiceType + = ConnectorServiceType.fromString(reader.getString()); + } else if ("storageAccountName".equals(fieldName)) { + deserializedAzureBlobStorageSourceConnectorServiceInfo.storageAccountName = reader.getString(); + } else if ("storageAccountKey".equals(fieldName)) { + deserializedAzureBlobStorageSourceConnectorServiceInfo.storageAccountKey = reader.getString(); + } else if ("storageContainerName".equals(fieldName)) { + deserializedAzureBlobStorageSourceConnectorServiceInfo.storageContainerName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureBlobStorageSourceConnectorServiceInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureCosmosDBSinkConnectorServiceInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureCosmosDBSinkConnectorServiceInfo.java new file mode 100644 index 000000000000..b1fae01146b4 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureCosmosDBSinkConnectorServiceInfo.java @@ -0,0 +1,226 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The authentication info when auth_type is AzureCosmosDBSinkConnector. + */ +@Fluent +public final class AzureCosmosDBSinkConnectorServiceInfo extends ConnectorServiceTypeInfoBase { + /* + * The connector service type. + */ + private ConnectorServiceType connectorServiceType = ConnectorServiceType.AZURE_COSMOS_DBSINK_CONNECTOR; + + /* + * Azure Cosmos Database Name + */ + private String cosmosDatabaseName; + + /* + * Azure Cosmos Database Master Key + */ + private String cosmosMasterKey; + + /* + * Azure Cosmos Database Connection Endpoint + */ + private String cosmosConnectionEndpoint; + + /* + * Azure Cosmos Database Containers Topic Mapping + */ + private String cosmosContainersTopicMapping; + + /* + * Azure Cosmos Database Id Strategy + */ + private String cosmosIdStrategy; + + /** + * Creates an instance of AzureCosmosDBSinkConnectorServiceInfo class. + */ + public AzureCosmosDBSinkConnectorServiceInfo() { + } + + /** + * Get the connectorServiceType property: The connector service type. + * + * @return the connectorServiceType value. + */ + @Override + public ConnectorServiceType connectorServiceType() { + return this.connectorServiceType; + } + + /** + * Get the cosmosDatabaseName property: Azure Cosmos Database Name. + * + * @return the cosmosDatabaseName value. 
+ */ + public String cosmosDatabaseName() { + return this.cosmosDatabaseName; + } + + /** + * Set the cosmosDatabaseName property: Azure Cosmos Database Name. + * + * @param cosmosDatabaseName the cosmosDatabaseName value to set. + * @return the AzureCosmosDBSinkConnectorServiceInfo object itself. + */ + public AzureCosmosDBSinkConnectorServiceInfo withCosmosDatabaseName(String cosmosDatabaseName) { + this.cosmosDatabaseName = cosmosDatabaseName; + return this; + } + + /** + * Get the cosmosMasterKey property: Azure Cosmos Database Master Key. + * + * @return the cosmosMasterKey value. + */ + public String cosmosMasterKey() { + return this.cosmosMasterKey; + } + + /** + * Set the cosmosMasterKey property: Azure Cosmos Database Master Key. + * + * @param cosmosMasterKey the cosmosMasterKey value to set. + * @return the AzureCosmosDBSinkConnectorServiceInfo object itself. + */ + public AzureCosmosDBSinkConnectorServiceInfo withCosmosMasterKey(String cosmosMasterKey) { + this.cosmosMasterKey = cosmosMasterKey; + return this; + } + + /** + * Get the cosmosConnectionEndpoint property: Azure Cosmos Database Connection Endpoint. + * + * @return the cosmosConnectionEndpoint value. + */ + public String cosmosConnectionEndpoint() { + return this.cosmosConnectionEndpoint; + } + + /** + * Set the cosmosConnectionEndpoint property: Azure Cosmos Database Connection Endpoint. + * + * @param cosmosConnectionEndpoint the cosmosConnectionEndpoint value to set. + * @return the AzureCosmosDBSinkConnectorServiceInfo object itself. + */ + public AzureCosmosDBSinkConnectorServiceInfo withCosmosConnectionEndpoint(String cosmosConnectionEndpoint) { + this.cosmosConnectionEndpoint = cosmosConnectionEndpoint; + return this; + } + + /** + * Get the cosmosContainersTopicMapping property: Azure Cosmos Database Containers Topic Mapping. + * + * @return the cosmosContainersTopicMapping value. + */ + public String cosmosContainersTopicMapping() { + return this.cosmosContainersTopicMapping; + } + + /** + * Set the cosmosContainersTopicMapping property: Azure Cosmos Database Containers Topic Mapping. + * + * @param cosmosContainersTopicMapping the cosmosContainersTopicMapping value to set. + * @return the AzureCosmosDBSinkConnectorServiceInfo object itself. + */ + public AzureCosmosDBSinkConnectorServiceInfo withCosmosContainersTopicMapping(String cosmosContainersTopicMapping) { + this.cosmosContainersTopicMapping = cosmosContainersTopicMapping; + return this; + } + + /** + * Get the cosmosIdStrategy property: Azure Cosmos Database Id Strategy. + * + * @return the cosmosIdStrategy value. + */ + public String cosmosIdStrategy() { + return this.cosmosIdStrategy; + } + + /** + * Set the cosmosIdStrategy property: Azure Cosmos Database Id Strategy. + * + * @param cosmosIdStrategy the cosmosIdStrategy value to set. + * @return the AzureCosmosDBSinkConnectorServiceInfo object itself. + */ + public AzureCosmosDBSinkConnectorServiceInfo withCosmosIdStrategy(String cosmosIdStrategy) { + this.cosmosIdStrategy = cosmosIdStrategy; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectorServiceType", + this.connectorServiceType == null ? 
null : this.connectorServiceType.toString()); + jsonWriter.writeStringField("cosmosDatabaseName", this.cosmosDatabaseName); + jsonWriter.writeStringField("cosmosMasterKey", this.cosmosMasterKey); + jsonWriter.writeStringField("cosmosConnectionEndpoint", this.cosmosConnectionEndpoint); + jsonWriter.writeStringField("cosmosContainersTopicMapping", this.cosmosContainersTopicMapping); + jsonWriter.writeStringField("cosmosIdStrategy", this.cosmosIdStrategy); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureCosmosDBSinkConnectorServiceInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureCosmosDBSinkConnectorServiceInfo if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureCosmosDBSinkConnectorServiceInfo. + */ + public static AzureCosmosDBSinkConnectorServiceInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureCosmosDBSinkConnectorServiceInfo deserializedAzureCosmosDBSinkConnectorServiceInfo + = new AzureCosmosDBSinkConnectorServiceInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorServiceType".equals(fieldName)) { + deserializedAzureCosmosDBSinkConnectorServiceInfo.connectorServiceType + = ConnectorServiceType.fromString(reader.getString()); + } else if ("cosmosDatabaseName".equals(fieldName)) { + deserializedAzureCosmosDBSinkConnectorServiceInfo.cosmosDatabaseName = reader.getString(); + } else if ("cosmosMasterKey".equals(fieldName)) { + deserializedAzureCosmosDBSinkConnectorServiceInfo.cosmosMasterKey = reader.getString(); + } else if ("cosmosConnectionEndpoint".equals(fieldName)) { + deserializedAzureCosmosDBSinkConnectorServiceInfo.cosmosConnectionEndpoint = reader.getString(); + } else if ("cosmosContainersTopicMapping".equals(fieldName)) { + deserializedAzureCosmosDBSinkConnectorServiceInfo.cosmosContainersTopicMapping = reader.getString(); + } else if ("cosmosIdStrategy".equals(fieldName)) { + deserializedAzureCosmosDBSinkConnectorServiceInfo.cosmosIdStrategy = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureCosmosDBSinkConnectorServiceInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureCosmosDBSourceConnectorServiceInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureCosmosDBSourceConnectorServiceInfo.java new file mode 100644 index 000000000000..5c421e3f860c --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureCosmosDBSourceConnectorServiceInfo.java @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The authentication info when auth_type is AzureCosmosDBSourceConnector. 
+ */ +@Fluent +public final class AzureCosmosDBSourceConnectorServiceInfo extends ConnectorServiceTypeInfoBase { + /* + * The connector service type. + */ + private ConnectorServiceType connectorServiceType = ConnectorServiceType.AZURE_COSMOS_DBSOURCE_CONNECTOR; + + /* + * Azure Cosmos Database Name + */ + private String cosmosDatabaseName; + + /* + * Azure Cosmos Database Master Key + */ + private String cosmosMasterKey; + + /* + * Azure Cosmos Database Connection Endpoint + */ + private String cosmosConnectionEndpoint; + + /* + * Azure Cosmos Database Containers Topic Mapping + */ + private String cosmosContainersTopicMapping; + + /* + * Azure Cosmos Database Message Key Enabled + */ + private Boolean cosmosMessageKeyEnabled; + + /* + * Azure Cosmos Database Message Key Field + */ + private String cosmosMessageKeyField; + + /** + * Creates an instance of AzureCosmosDBSourceConnectorServiceInfo class. + */ + public AzureCosmosDBSourceConnectorServiceInfo() { + } + + /** + * Get the connectorServiceType property: The connector service type. + * + * @return the connectorServiceType value. + */ + @Override + public ConnectorServiceType connectorServiceType() { + return this.connectorServiceType; + } + + /** + * Get the cosmosDatabaseName property: Azure Cosmos Database Name. + * + * @return the cosmosDatabaseName value. + */ + public String cosmosDatabaseName() { + return this.cosmosDatabaseName; + } + + /** + * Set the cosmosDatabaseName property: Azure Cosmos Database Name. + * + * @param cosmosDatabaseName the cosmosDatabaseName value to set. + * @return the AzureCosmosDBSourceConnectorServiceInfo object itself. + */ + public AzureCosmosDBSourceConnectorServiceInfo withCosmosDatabaseName(String cosmosDatabaseName) { + this.cosmosDatabaseName = cosmosDatabaseName; + return this; + } + + /** + * Get the cosmosMasterKey property: Azure Cosmos Database Master Key. + * + * @return the cosmosMasterKey value. + */ + public String cosmosMasterKey() { + return this.cosmosMasterKey; + } + + /** + * Set the cosmosMasterKey property: Azure Cosmos Database Master Key. + * + * @param cosmosMasterKey the cosmosMasterKey value to set. + * @return the AzureCosmosDBSourceConnectorServiceInfo object itself. + */ + public AzureCosmosDBSourceConnectorServiceInfo withCosmosMasterKey(String cosmosMasterKey) { + this.cosmosMasterKey = cosmosMasterKey; + return this; + } + + /** + * Get the cosmosConnectionEndpoint property: Azure Cosmos Database Connection Endpoint. + * + * @return the cosmosConnectionEndpoint value. + */ + public String cosmosConnectionEndpoint() { + return this.cosmosConnectionEndpoint; + } + + /** + * Set the cosmosConnectionEndpoint property: Azure Cosmos Database Connection Endpoint. + * + * @param cosmosConnectionEndpoint the cosmosConnectionEndpoint value to set. + * @return the AzureCosmosDBSourceConnectorServiceInfo object itself. + */ + public AzureCosmosDBSourceConnectorServiceInfo withCosmosConnectionEndpoint(String cosmosConnectionEndpoint) { + this.cosmosConnectionEndpoint = cosmosConnectionEndpoint; + return this; + } + + /** + * Get the cosmosContainersTopicMapping property: Azure Cosmos Database Containers Topic Mapping. + * + * @return the cosmosContainersTopicMapping value. + */ + public String cosmosContainersTopicMapping() { + return this.cosmosContainersTopicMapping; + } + + /** + * Set the cosmosContainersTopicMapping property: Azure Cosmos Database Containers Topic Mapping. + * + * @param cosmosContainersTopicMapping the cosmosContainersTopicMapping value to set. 
+ * @return the AzureCosmosDBSourceConnectorServiceInfo object itself. + */ + public AzureCosmosDBSourceConnectorServiceInfo + withCosmosContainersTopicMapping(String cosmosContainersTopicMapping) { + this.cosmosContainersTopicMapping = cosmosContainersTopicMapping; + return this; + } + + /** + * Get the cosmosMessageKeyEnabled property: Azure Cosmos Database Message Key Enabled. + * + * @return the cosmosMessageKeyEnabled value. + */ + public Boolean cosmosMessageKeyEnabled() { + return this.cosmosMessageKeyEnabled; + } + + /** + * Set the cosmosMessageKeyEnabled property: Azure Cosmos Database Message Key Enabled. + * + * @param cosmosMessageKeyEnabled the cosmosMessageKeyEnabled value to set. + * @return the AzureCosmosDBSourceConnectorServiceInfo object itself. + */ + public AzureCosmosDBSourceConnectorServiceInfo withCosmosMessageKeyEnabled(Boolean cosmosMessageKeyEnabled) { + this.cosmosMessageKeyEnabled = cosmosMessageKeyEnabled; + return this; + } + + /** + * Get the cosmosMessageKeyField property: Azure Cosmos Database Message Key Field. + * + * @return the cosmosMessageKeyField value. + */ + public String cosmosMessageKeyField() { + return this.cosmosMessageKeyField; + } + + /** + * Set the cosmosMessageKeyField property: Azure Cosmos Database Message Key Field. + * + * @param cosmosMessageKeyField the cosmosMessageKeyField value to set. + * @return the AzureCosmosDBSourceConnectorServiceInfo object itself. + */ + public AzureCosmosDBSourceConnectorServiceInfo withCosmosMessageKeyField(String cosmosMessageKeyField) { + this.cosmosMessageKeyField = cosmosMessageKeyField; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectorServiceType", + this.connectorServiceType == null ? null : this.connectorServiceType.toString()); + jsonWriter.writeStringField("cosmosDatabaseName", this.cosmosDatabaseName); + jsonWriter.writeStringField("cosmosMasterKey", this.cosmosMasterKey); + jsonWriter.writeStringField("cosmosConnectionEndpoint", this.cosmosConnectionEndpoint); + jsonWriter.writeStringField("cosmosContainersTopicMapping", this.cosmosContainersTopicMapping); + jsonWriter.writeBooleanField("cosmosMessageKeyEnabled", this.cosmosMessageKeyEnabled); + jsonWriter.writeStringField("cosmosMessageKeyField", this.cosmosMessageKeyField); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureCosmosDBSourceConnectorServiceInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureCosmosDBSourceConnectorServiceInfo if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureCosmosDBSourceConnectorServiceInfo. 
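The Cosmos DB source variant is built the same way; a sketch using only the setters defined above, with placeholder values (the containers-to-topics mapping syntax is an assumption, not documented in this hunk):

```java
AzureCosmosDBSourceConnectorServiceInfo cosmosSourceInfo = new AzureCosmosDBSourceConnectorServiceInfo()
    .withCosmosConnectionEndpoint("https://myaccount.documents.azure.com:443/") // placeholder endpoint
    .withCosmosMasterKey("<cosmos-master-key>")                                  // placeholder secret
    .withCosmosDatabaseName("ordersDb")
    .withCosmosContainersTopicMapping("orders#orders-topic")                     // assumed mapping syntax
    .withCosmosMessageKeyEnabled(true)
    .withCosmosMessageKeyField("id");
```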
+ */ + public static AzureCosmosDBSourceConnectorServiceInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureCosmosDBSourceConnectorServiceInfo deserializedAzureCosmosDBSourceConnectorServiceInfo + = new AzureCosmosDBSourceConnectorServiceInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorServiceType".equals(fieldName)) { + deserializedAzureCosmosDBSourceConnectorServiceInfo.connectorServiceType + = ConnectorServiceType.fromString(reader.getString()); + } else if ("cosmosDatabaseName".equals(fieldName)) { + deserializedAzureCosmosDBSourceConnectorServiceInfo.cosmosDatabaseName = reader.getString(); + } else if ("cosmosMasterKey".equals(fieldName)) { + deserializedAzureCosmosDBSourceConnectorServiceInfo.cosmosMasterKey = reader.getString(); + } else if ("cosmosConnectionEndpoint".equals(fieldName)) { + deserializedAzureCosmosDBSourceConnectorServiceInfo.cosmosConnectionEndpoint = reader.getString(); + } else if ("cosmosContainersTopicMapping".equals(fieldName)) { + deserializedAzureCosmosDBSourceConnectorServiceInfo.cosmosContainersTopicMapping + = reader.getString(); + } else if ("cosmosMessageKeyEnabled".equals(fieldName)) { + deserializedAzureCosmosDBSourceConnectorServiceInfo.cosmosMessageKeyEnabled + = reader.getNullable(JsonReader::getBoolean); + } else if ("cosmosMessageKeyField".equals(fieldName)) { + deserializedAzureCosmosDBSourceConnectorServiceInfo.cosmosMessageKeyField = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureCosmosDBSourceConnectorServiceInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureSynapseAnalyticsSinkConnectorServiceInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureSynapseAnalyticsSinkConnectorServiceInfo.java new file mode 100644 index 000000000000..b0e8a9ccb79e --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/AzureSynapseAnalyticsSinkConnectorServiceInfo.java @@ -0,0 +1,199 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The authentication info when auth_type is AzureSynapseAnalyticsSinkConnector. + */ +@Fluent +public final class AzureSynapseAnalyticsSinkConnectorServiceInfo extends ConnectorServiceTypeInfoBase { + /* + * The connector service type. + */ + private ConnectorServiceType connectorServiceType = ConnectorServiceType.AZURE_SYNAPSE_ANALYTICS_SINK_CONNECTOR; + + /* + * Azure Synapse Analytics SQL Server Name + */ + private String synapseSqlServerName; + + /* + * Azure Synapse SQL login details + */ + private String synapseSqlUser; + + /* + * Azure Synapse SQL login details + */ + private String synapseSqlPassword; + + /* + * Azure Synapse Dedicated SQL Pool Database Name + */ + private String synapseSqlDatabaseName; + + /** + * Creates an instance of AzureSynapseAnalyticsSinkConnectorServiceInfo class. 
+ */ + public AzureSynapseAnalyticsSinkConnectorServiceInfo() { + } + + /** + * Get the connectorServiceType property: The connector service type. + * + * @return the connectorServiceType value. + */ + @Override + public ConnectorServiceType connectorServiceType() { + return this.connectorServiceType; + } + + /** + * Get the synapseSqlServerName property: Azure Synapse Analytics SQL Server Name. + * + * @return the synapseSqlServerName value. + */ + public String synapseSqlServerName() { + return this.synapseSqlServerName; + } + + /** + * Set the synapseSqlServerName property: Azure Synapse Analytics SQL Server Name. + * + * @param synapseSqlServerName the synapseSqlServerName value to set. + * @return the AzureSynapseAnalyticsSinkConnectorServiceInfo object itself. + */ + public AzureSynapseAnalyticsSinkConnectorServiceInfo withSynapseSqlServerName(String synapseSqlServerName) { + this.synapseSqlServerName = synapseSqlServerName; + return this; + } + + /** + * Get the synapseSqlUser property: Azure Synapse SQL login details. + * + * @return the synapseSqlUser value. + */ + public String synapseSqlUser() { + return this.synapseSqlUser; + } + + /** + * Set the synapseSqlUser property: Azure Synapse SQL login details. + * + * @param synapseSqlUser the synapseSqlUser value to set. + * @return the AzureSynapseAnalyticsSinkConnectorServiceInfo object itself. + */ + public AzureSynapseAnalyticsSinkConnectorServiceInfo withSynapseSqlUser(String synapseSqlUser) { + this.synapseSqlUser = synapseSqlUser; + return this; + } + + /** + * Get the synapseSqlPassword property: Azure Synapse SQL login details. + * + * @return the synapseSqlPassword value. + */ + public String synapseSqlPassword() { + return this.synapseSqlPassword; + } + + /** + * Set the synapseSqlPassword property: Azure Synapse SQL login details. + * + * @param synapseSqlPassword the synapseSqlPassword value to set. + * @return the AzureSynapseAnalyticsSinkConnectorServiceInfo object itself. + */ + public AzureSynapseAnalyticsSinkConnectorServiceInfo withSynapseSqlPassword(String synapseSqlPassword) { + this.synapseSqlPassword = synapseSqlPassword; + return this; + } + + /** + * Get the synapseSqlDatabaseName property: Azure Synapse Dedicated SQL Pool Database Name. + * + * @return the synapseSqlDatabaseName value. + */ + public String synapseSqlDatabaseName() { + return this.synapseSqlDatabaseName; + } + + /** + * Set the synapseSqlDatabaseName property: Azure Synapse Dedicated SQL Pool Database Name. + * + * @param synapseSqlDatabaseName the synapseSqlDatabaseName value to set. + * @return the AzureSynapseAnalyticsSinkConnectorServiceInfo object itself. + */ + public AzureSynapseAnalyticsSinkConnectorServiceInfo withSynapseSqlDatabaseName(String synapseSqlDatabaseName) { + this.synapseSqlDatabaseName = synapseSqlDatabaseName; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectorServiceType", + this.connectorServiceType == null ? 
null : this.connectorServiceType.toString()); + jsonWriter.writeStringField("synapseSqlServerName", this.synapseSqlServerName); + jsonWriter.writeStringField("synapseSqlUser", this.synapseSqlUser); + jsonWriter.writeStringField("synapseSqlPassword", this.synapseSqlPassword); + jsonWriter.writeStringField("synapseSqlDatabaseName", this.synapseSqlDatabaseName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of AzureSynapseAnalyticsSinkConnectorServiceInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of AzureSynapseAnalyticsSinkConnectorServiceInfo if the JsonReader was pointing to an + * instance of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the AzureSynapseAnalyticsSinkConnectorServiceInfo. + */ + public static AzureSynapseAnalyticsSinkConnectorServiceInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + AzureSynapseAnalyticsSinkConnectorServiceInfo deserializedAzureSynapseAnalyticsSinkConnectorServiceInfo + = new AzureSynapseAnalyticsSinkConnectorServiceInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorServiceType".equals(fieldName)) { + deserializedAzureSynapseAnalyticsSinkConnectorServiceInfo.connectorServiceType + = ConnectorServiceType.fromString(reader.getString()); + } else if ("synapseSqlServerName".equals(fieldName)) { + deserializedAzureSynapseAnalyticsSinkConnectorServiceInfo.synapseSqlServerName = reader.getString(); + } else if ("synapseSqlUser".equals(fieldName)) { + deserializedAzureSynapseAnalyticsSinkConnectorServiceInfo.synapseSqlUser = reader.getString(); + } else if ("synapseSqlPassword".equals(fieldName)) { + deserializedAzureSynapseAnalyticsSinkConnectorServiceInfo.synapseSqlPassword = reader.getString(); + } else if ("synapseSqlDatabaseName".equals(fieldName)) { + deserializedAzureSynapseAnalyticsSinkConnectorServiceInfo.synapseSqlDatabaseName + = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedAzureSynapseAnalyticsSinkConnectorServiceInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Clusters.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Clusters.java new file mode 100644 index 000000000000..a14330a32e4a --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Clusters.java @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.Context; + +/** + * Resource collection API of Clusters. + */ +public interface Clusters { + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. 
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId); + + /** + * Delete confluent cluster by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId, + Context context); + + /** + * Delete confluent cluster by id. + * + * @param id the resource ID. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void deleteById(String id); + + /** + * Delete confluent cluster by id. + * + * @param id the resource ID. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void deleteByIdWithResponse(String id, Context context); + + /** + * Begins definition for a new SCClusterRecord resource. + * + * @param name resource name. + * @return the first stage of the new SCClusterRecord definition. + */ + SCClusterRecord.DefinitionStages.Blank define(String name); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorClass.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorClass.java new file mode 100644 index 000000000000..cd71bceb7220 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorClass.java @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.ExpandableStringEnum; +import java.util.Collection; + +/** + * ConnectorClass + * + * Connector Class. + */ +public final class ConnectorClass extends ExpandableStringEnum { + /** + * Static value AZUREBLOBSOURCE for ConnectorClass. + */ + public static final ConnectorClass AZUREBLOBSOURCE = fromString("AZUREBLOBSOURCE"); + + /** + * Static value AZUREBLOBSINK for ConnectorClass. + */ + public static final ConnectorClass AZUREBLOBSINK = fromString("AZUREBLOBSINK"); + + /** + * Creates a new instance of ConnectorClass value. + * + * @deprecated Use the {@link #fromString(String)} factory method. 
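A hedged usage sketch for the Clusters collection above. The manager.clusters() accessor is assumed (the ConfluentManager entry point is not part of this file) and all names are placeholders:

```java
// Delete a cluster by its individual path parameters...
manager.clusters().delete("myRg", "myOrg", "env-123", "lkc-456");

// ...or by its full ARM resource ID (same segments the Topics helpers parse, minus the topic name).
String clusterId = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg"
    + "/providers/Microsoft.Confluent/organizations/myOrg/environments/env-123/clusters/lkc-456";
manager.clusters().deleteById(clusterId);
```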
+ */ + @Deprecated + public ConnectorClass() { + } + + /** + * Creates or finds a ConnectorClass from its string representation. + * + * @param name a name to look for. + * @return the corresponding ConnectorClass. + */ + public static ConnectorClass fromString(String name) { + return fromString(name, ConnectorClass.class); + } + + /** + * Gets known ConnectorClass values. + * + * @return known ConnectorClass values. + */ + public static Collection values() { + return values(ConnectorClass.class); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorInfoBase.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorInfoBase.java new file mode 100644 index 000000000000..2be31f4a53ff --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorInfoBase.java @@ -0,0 +1,207 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * Connector Info Base properties. + */ +@Fluent +public final class ConnectorInfoBase implements JsonSerializable { + /* + * Connector Type + */ + private ConnectorType connectorType; + + /* + * Connector Class + */ + private ConnectorClass connectorClass; + + /* + * Connector Name + */ + private String connectorName; + + /* + * Connector Id + */ + private String connectorId; + + /* + * Connector Status + */ + private ConnectorStatus connectorState; + + /** + * Creates an instance of ConnectorInfoBase class. + */ + public ConnectorInfoBase() { + } + + /** + * Get the connectorType property: Connector Type. + * + * @return the connectorType value. + */ + public ConnectorType connectorType() { + return this.connectorType; + } + + /** + * Set the connectorType property: Connector Type. + * + * @param connectorType the connectorType value to set. + * @return the ConnectorInfoBase object itself. + */ + public ConnectorInfoBase withConnectorType(ConnectorType connectorType) { + this.connectorType = connectorType; + return this; + } + + /** + * Get the connectorClass property: Connector Class. + * + * @return the connectorClass value. + */ + public ConnectorClass connectorClass() { + return this.connectorClass; + } + + /** + * Set the connectorClass property: Connector Class. + * + * @param connectorClass the connectorClass value to set. + * @return the ConnectorInfoBase object itself. + */ + public ConnectorInfoBase withConnectorClass(ConnectorClass connectorClass) { + this.connectorClass = connectorClass; + return this; + } + + /** + * Get the connectorName property: Connector Name. + * + * @return the connectorName value. + */ + public String connectorName() { + return this.connectorName; + } + + /** + * Set the connectorName property: Connector Name. + * + * @param connectorName the connectorName value to set. + * @return the ConnectorInfoBase object itself. + */ + public ConnectorInfoBase withConnectorName(String connectorName) { + this.connectorName = connectorName; + return this; + } + + /** + * Get the connectorId property: Connector Id. + * + * @return the connectorId value. 
+ */ + public String connectorId() { + return this.connectorId; + } + + /** + * Set the connectorId property: Connector Id. + * + * @param connectorId the connectorId value to set. + * @return the ConnectorInfoBase object itself. + */ + public ConnectorInfoBase withConnectorId(String connectorId) { + this.connectorId = connectorId; + return this; + } + + /** + * Get the connectorState property: Connector Status. + * + * @return the connectorState value. + */ + public ConnectorStatus connectorState() { + return this.connectorState; + } + + /** + * Set the connectorState property: Connector Status. + * + * @param connectorState the connectorState value to set. + * @return the ConnectorInfoBase object itself. + */ + public ConnectorInfoBase withConnectorState(ConnectorStatus connectorState) { + this.connectorState = connectorState; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectorType", this.connectorType == null ? null : this.connectorType.toString()); + jsonWriter.writeStringField("connectorClass", + this.connectorClass == null ? null : this.connectorClass.toString()); + jsonWriter.writeStringField("connectorName", this.connectorName); + jsonWriter.writeStringField("connectorId", this.connectorId); + jsonWriter.writeStringField("connectorState", + this.connectorState == null ? null : this.connectorState.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConnectorInfoBase from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ConnectorInfoBase if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the ConnectorInfoBase. 
+ */ + public static ConnectorInfoBase fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConnectorInfoBase deserializedConnectorInfoBase = new ConnectorInfoBase(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorType".equals(fieldName)) { + deserializedConnectorInfoBase.connectorType = ConnectorType.fromString(reader.getString()); + } else if ("connectorClass".equals(fieldName)) { + deserializedConnectorInfoBase.connectorClass = ConnectorClass.fromString(reader.getString()); + } else if ("connectorName".equals(fieldName)) { + deserializedConnectorInfoBase.connectorName = reader.getString(); + } else if ("connectorId".equals(fieldName)) { + deserializedConnectorInfoBase.connectorId = reader.getString(); + } else if ("connectorState".equals(fieldName)) { + deserializedConnectorInfoBase.connectorState = ConnectorStatus.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedConnectorInfoBase; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorResource.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorResource.java new file mode 100644 index 000000000000..74467a89fcc1 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorResource.java @@ -0,0 +1,260 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.management.SystemData; +import com.azure.core.util.Context; +import com.azure.resourcemanager.confluent.fluent.models.ConnectorResourceInner; + +/** + * An immutable client-side representation of ConnectorResource. + */ +public interface ConnectorResource { + /** + * Gets the id property: Fully qualified resource Id for the resource. + * + * @return the id value. + */ + String id(); + + /** + * Gets the name property: The name of the resource. + * + * @return the name value. + */ + String name(); + + /** + * Gets the type property: The type of the resource. + * + * @return the type value. + */ + String type(); + + /** + * Gets the systemData property: Azure Resource Manager metadata containing createdBy and modifiedBy information. + * + * @return the systemData value. + */ + SystemData systemData(); + + /** + * Gets the connectorBasicInfo property: Connector Info Base. + * + * @return the connectorBasicInfo value. + */ + ConnectorInfoBase connectorBasicInfo(); + + /** + * Gets the connectorServiceTypeInfo property: Connector Service type info base properties. + * + * @return the connectorServiceTypeInfo value. + */ + ConnectorServiceTypeInfoBase connectorServiceTypeInfo(); + + /** + * Gets the partnerConnectorInfo property: The connection information consumed by applications. + * + * @return the partnerConnectorInfo value. + */ + PartnerInfoBase partnerConnectorInfo(); + + /** + * Gets the name of the resource group. + * + * @return the name of the resource group. + */ + String resourceGroupName(); + + /** + * Gets the inner com.azure.resourcemanager.confluent.fluent.models.ConnectorResourceInner object. + * + * @return the inner object. 
+ */ + ConnectorResourceInner innerModel(); + + /** + * The entirety of the ConnectorResource definition. + */ + interface Definition + extends DefinitionStages.Blank, DefinitionStages.WithParentResource, DefinitionStages.WithCreate { + } + + /** + * The ConnectorResource definition stages. + */ + interface DefinitionStages { + /** + * The first stage of the ConnectorResource definition. + */ + interface Blank extends WithParentResource { + } + + /** + * The stage of the ConnectorResource definition allowing to specify parent resource. + */ + interface WithParentResource { + /** + * Specifies resourceGroupName, organizationName, environmentId, clusterId. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @return the next definition stage. + */ + WithCreate withExistingCluster(String resourceGroupName, String organizationName, String environmentId, + String clusterId); + } + + /** + * The stage of the ConnectorResource definition which contains all the minimum required properties for the + * resource to be created, but also allows for any other optional properties to be specified. + */ + interface WithCreate extends DefinitionStages.WithConnectorBasicInfo, + DefinitionStages.WithConnectorServiceTypeInfo, DefinitionStages.WithPartnerConnectorInfo { + /** + * Executes the create request. + * + * @return the created resource. + */ + ConnectorResource create(); + + /** + * Executes the create request. + * + * @param context The context to associate with this operation. + * @return the created resource. + */ + ConnectorResource create(Context context); + } + + /** + * The stage of the ConnectorResource definition allowing to specify connectorBasicInfo. + */ + interface WithConnectorBasicInfo { + /** + * Specifies the connectorBasicInfo property: Connector Info Base. + * + * @param connectorBasicInfo Connector Info Base. + * @return the next definition stage. + */ + WithCreate withConnectorBasicInfo(ConnectorInfoBase connectorBasicInfo); + } + + /** + * The stage of the ConnectorResource definition allowing to specify connectorServiceTypeInfo. + */ + interface WithConnectorServiceTypeInfo { + /** + * Specifies the connectorServiceTypeInfo property: Connector Service type info base properties.. + * + * @param connectorServiceTypeInfo Connector Service type info base properties. + * @return the next definition stage. + */ + WithCreate withConnectorServiceTypeInfo(ConnectorServiceTypeInfoBase connectorServiceTypeInfo); + } + + /** + * The stage of the ConnectorResource definition allowing to specify partnerConnectorInfo. + */ + interface WithPartnerConnectorInfo { + /** + * Specifies the partnerConnectorInfo property: The connection information consumed by applications.. + * + * @param partnerConnectorInfo The connection information consumed by applications. + * @return the next definition stage. + */ + WithCreate withPartnerConnectorInfo(PartnerInfoBase partnerConnectorInfo); + } + } + + /** + * Begins update for the ConnectorResource resource. + * + * @return the stage of resource update. + */ + ConnectorResource.Update update(); + + /** + * The template for ConnectorResource update. + */ + interface Update extends UpdateStages.WithConnectorBasicInfo, UpdateStages.WithConnectorServiceTypeInfo, + UpdateStages.WithPartnerConnectorInfo { + /** + * Executes the update request. 
+ * + * @return the updated resource. + */ + ConnectorResource apply(); + + /** + * Executes the update request. + * + * @param context The context to associate with this operation. + * @return the updated resource. + */ + ConnectorResource apply(Context context); + } + + /** + * The ConnectorResource update stages. + */ + interface UpdateStages { + /** + * The stage of the ConnectorResource update allowing to specify connectorBasicInfo. + */ + interface WithConnectorBasicInfo { + /** + * Specifies the connectorBasicInfo property: Connector Info Base. + * + * @param connectorBasicInfo Connector Info Base. + * @return the next definition stage. + */ + Update withConnectorBasicInfo(ConnectorInfoBase connectorBasicInfo); + } + + /** + * The stage of the ConnectorResource update allowing to specify connectorServiceTypeInfo. + */ + interface WithConnectorServiceTypeInfo { + /** + * Specifies the connectorServiceTypeInfo property: Connector Service type info base properties.. + * + * @param connectorServiceTypeInfo Connector Service type info base properties. + * @return the next definition stage. + */ + Update withConnectorServiceTypeInfo(ConnectorServiceTypeInfoBase connectorServiceTypeInfo); + } + + /** + * The stage of the ConnectorResource update allowing to specify partnerConnectorInfo. + */ + interface WithPartnerConnectorInfo { + /** + * Specifies the partnerConnectorInfo property: The connection information consumed by applications.. + * + * @param partnerConnectorInfo The connection information consumed by applications. + * @return the next definition stage. + */ + Update withPartnerConnectorInfo(PartnerInfoBase partnerConnectorInfo); + } + } + + /** + * Refreshes the resource to sync with Azure. + * + * @return the refreshed resource. + */ + ConnectorResource refresh(); + + /** + * Refreshes the resource to sync with Azure. + * + * @param context The context to associate with this operation. + * @return the refreshed resource. + */ + ConnectorResource refresh(Context context); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorServiceType.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorServiceType.java new file mode 100644 index 000000000000..e6242fe833d8 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorServiceType.java @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.ExpandableStringEnum; +import java.util.Collection; + +/** + * The connector service type. + */ +public final class ConnectorServiceType extends ExpandableStringEnum { + /** + * Static value AzureBlobStorageSinkConnector for ConnectorServiceType. + */ + public static final ConnectorServiceType AZURE_BLOB_STORAGE_SINK_CONNECTOR + = fromString("AzureBlobStorageSinkConnector"); + + /** + * Static value AzureBlobStorageSourceConnector for ConnectorServiceType. + */ + public static final ConnectorServiceType AZURE_BLOB_STORAGE_SOURCE_CONNECTOR + = fromString("AzureBlobStorageSourceConnector"); + + /** + * Static value AzureCosmosDBSinkConnector for ConnectorServiceType. 
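Putting the definition stages above together with the models earlier in this change, a hedged end-to-end sketch. The manager.connectors() accessor and the "SINK" connector type value are assumptions (the Connectors collection and the ConnectorType values are defined elsewhere in this package), and every resource name is a placeholder:

```java
ConnectorResource connector = manager.connectors()
    .define("blob-sink-connector")
    .withExistingCluster("myRg", "myOrg", "env-123", "lkc-456")
    .withConnectorBasicInfo(new ConnectorInfoBase()
        .withConnectorType(ConnectorType.fromString("SINK"))    // assumed known value
        .withConnectorClass(ConnectorClass.AZUREBLOBSINK)
        .withConnectorName("blob-sink-connector"))
    .withConnectorServiceTypeInfo(new AzureBlobStorageSinkConnectorServiceInfo()
        .withStorageAccountName("mystorageaccount")
        .withStorageAccountKey("<storage-account-key>")
        .withStorageContainerName("confluent-sink-container"))
    .create();
```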
+ */ + public static final ConnectorServiceType AZURE_COSMOS_DBSINK_CONNECTOR = fromString("AzureCosmosDBSinkConnector"); + + /** + * Static value AzureCosmosDBSourceConnector for ConnectorServiceType. + */ + public static final ConnectorServiceType AZURE_COSMOS_DBSOURCE_CONNECTOR + = fromString("AzureCosmosDBSourceConnector"); + + /** + * Static value AzureSynapseAnalyticsSinkConnector for ConnectorServiceType. + */ + public static final ConnectorServiceType AZURE_SYNAPSE_ANALYTICS_SINK_CONNECTOR + = fromString("AzureSynapseAnalyticsSinkConnector"); + + /** + * Creates a new instance of ConnectorServiceType value. + * + * @deprecated Use the {@link #fromString(String)} factory method. + */ + @Deprecated + public ConnectorServiceType() { + } + + /** + * Creates or finds a ConnectorServiceType from its string representation. + * + * @param name a name to look for. + * @return the corresponding ConnectorServiceType. + */ + public static ConnectorServiceType fromString(String name) { + return fromString(name, ConnectorServiceType.class); + } + + /** + * Gets known ConnectorServiceType values. + * + * @return known ConnectorServiceType values. + */ + public static Collection values() { + return values(ConnectorServiceType.class); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorServiceTypeInfoBase.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorServiceTypeInfoBase.java new file mode 100644 index 000000000000..e3262eb14ba9 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorServiceTypeInfoBase.java @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Immutable; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The connector service type info. + */ +@Immutable +public class ConnectorServiceTypeInfoBase implements JsonSerializable { + /* + * The connector service type. + */ + private ConnectorServiceType connectorServiceType = ConnectorServiceType.fromString("ConnectorServiceTypeInfoBase"); + + /** + * Creates an instance of ConnectorServiceTypeInfoBase class. + */ + public ConnectorServiceTypeInfoBase() { + } + + /** + * Get the connectorServiceType property: The connector service type. + * + * @return the connectorServiceType value. + */ + public ConnectorServiceType connectorServiceType() { + return this.connectorServiceType; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("connectorServiceType", + this.connectorServiceType == null ? null : this.connectorServiceType.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ConnectorServiceTypeInfoBase from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
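+     * <p>Note: the reader buffers the JSON object, peeks at the {@code connectorServiceType} discriminator and
+     * dispatches to the matching subtype (for example {@code AzureBlobStorageSinkConnectorServiceInfo} when the
+     * value is {@code "AzureBlobStorageSinkConnector"}); unrecognized values fall back to this base type.</p>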
+ * @return An instance of ConnectorServiceTypeInfoBase if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ConnectorServiceTypeInfoBase. + */ + public static ConnectorServiceTypeInfoBase fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("connectorServiceType".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. + if ("AzureBlobStorageSinkConnector".equals(discriminatorValue)) { + return AzureBlobStorageSinkConnectorServiceInfo.fromJson(readerToUse.reset()); + } else if ("AzureBlobStorageSourceConnector".equals(discriminatorValue)) { + return AzureBlobStorageSourceConnectorServiceInfo.fromJson(readerToUse.reset()); + } else if ("AzureCosmosDBSinkConnector".equals(discriminatorValue)) { + return AzureCosmosDBSinkConnectorServiceInfo.fromJson(readerToUse.reset()); + } else if ("AzureCosmosDBSourceConnector".equals(discriminatorValue)) { + return AzureCosmosDBSourceConnectorServiceInfo.fromJson(readerToUse.reset()); + } else if ("AzureSynapseAnalyticsSinkConnector".equals(discriminatorValue)) { + return AzureSynapseAnalyticsSinkConnectorServiceInfo.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static ConnectorServiceTypeInfoBase fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ConnectorServiceTypeInfoBase deserializedConnectorServiceTypeInfoBase = new ConnectorServiceTypeInfoBase(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("connectorServiceType".equals(fieldName)) { + deserializedConnectorServiceTypeInfoBase.connectorServiceType + = ConnectorServiceType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedConnectorServiceTypeInfoBase; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorStatus.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorStatus.java new file mode 100644 index 000000000000..d24d5d22d0b6 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorStatus.java @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.ExpandableStringEnum; +import java.util.Collection; + +/** + * ConnectorStatus + * + * Connector Status. + */ +public final class ConnectorStatus extends ExpandableStringEnum { + /** + * Static value PROVISIONING for ConnectorStatus. + */ + public static final ConnectorStatus PROVISIONING = fromString("PROVISIONING"); + + /** + * Static value RUNNING for ConnectorStatus. 
+     */
+    public static final ConnectorStatus RUNNING = fromString("RUNNING");
+
+    /**
+     * Static value PAUSED for ConnectorStatus.
+     */
+    public static final ConnectorStatus PAUSED = fromString("PAUSED");
+
+    /**
+     * Static value FAILED for ConnectorStatus.
+     */
+    public static final ConnectorStatus FAILED = fromString("FAILED");
+
+    /**
+     * Creates a new instance of ConnectorStatus value.
+     *
+     * @deprecated Use the {@link #fromString(String)} factory method.
+     */
+    @Deprecated
+    public ConnectorStatus() {
+    }
+
+    /**
+     * Creates or finds a ConnectorStatus from its string representation.
+     *
+     * @param name a name to look for.
+     * @return the corresponding ConnectorStatus.
+     */
+    public static ConnectorStatus fromString(String name) {
+        return fromString(name, ConnectorStatus.class);
+    }
+
+    /**
+     * Gets known ConnectorStatus values.
+     *
+     * @return known ConnectorStatus values.
+     */
+    public static Collection<ConnectorStatus> values() {
+        return values(ConnectorStatus.class);
+    }
+}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorType.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorType.java
new file mode 100644
index 000000000000..75dfeb51bc5c
--- /dev/null
+++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ConnectorType.java
@@ -0,0 +1,53 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.resourcemanager.confluent.models;
+
+import com.azure.core.util.ExpandableStringEnum;
+import java.util.Collection;
+
+/**
+ * ConnectorType
+ *
+ * Connector Type.
+ */
+public final class ConnectorType extends ExpandableStringEnum<ConnectorType> {
+    /**
+     * Static value SINK for ConnectorType.
+     */
+    public static final ConnectorType SINK = fromString("SINK");
+
+    /**
+     * Static value SOURCE for ConnectorType.
+     */
+    public static final ConnectorType SOURCE = fromString("SOURCE");
+
+    /**
+     * Creates a new instance of ConnectorType value.
+     *
+     * @deprecated Use the {@link #fromString(String)} factory method.
+     */
+    @Deprecated
+    public ConnectorType() {
+    }
+
+    /**
+     * Creates or finds a ConnectorType from its string representation.
+     *
+     * @param name a name to look for.
+     * @return the corresponding ConnectorType.
+     */
+    public static ConnectorType fromString(String name) {
+        return fromString(name, ConnectorType.class);
+    }
+
+    /**
+     * Gets known ConnectorType values.
+     *
+     * @return known ConnectorType values.
+     */
+    public static Collection<ConnectorType> values() {
+        return values(ConnectorType.class);
+    }
+}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Connectors.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Connectors.java
new file mode 100644
index 000000000000..8fbbd8bc4c62
--- /dev/null
+++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Connectors.java
@@ -0,0 +1,165 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
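+
+/*
+ * Illustrative usage sketch (hypothetical): it assumes a ConfluentManager instance named "manager"; the
+ * resource group, organization, environment, cluster and connector names below are placeholders.
+ *
+ *   PagedIterable<ConnectorResource> connectors = manager.connectors()
+ *       .list("my-rg", "my-org", "env-1", "cluster-1");
+ *   long connectorCount = connectors.stream().count();
+ *
+ *   ConnectorResource single = manager.connectors()
+ *       .get("my-rg", "my-org", "env-1", "cluster-1", "my-connector");
+ *
+ *   manager.connectors().delete("my-rg", "my-org", "env-1", "cluster-1", "my-connector");
+ */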
+
+package com.azure.resourcemanager.confluent.models;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.core.http.rest.Response;
+import com.azure.core.util.Context;
+
+/**
+ * Resource collection API of Connectors.
+ */
+public interface Connectors {
+    /**
+     * Get confluent connector by Name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param connectorName Confluent connector name.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent connector by Name along with {@link Response}.
+     */
+    Response<ConnectorResource> getWithResponse(String resourceGroupName, String organizationName,
+        String environmentId, String clusterId, String connectorName, Context context);
+
+    /**
+     * Get confluent connector by Name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param connectorName Confluent connector name.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent connector by Name.
+     */
+    ConnectorResource get(String resourceGroupName, String organizationName, String environmentId, String clusterId,
+        String connectorName);
+
+    /**
+     * Delete confluent connector by name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param connectorName Confluent connector name.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     */
+    void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId,
+        String connectorName);
+
+    /**
+     * Delete confluent connector by name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param connectorName Confluent connector name.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     */
+    void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId,
+        String connectorName, Context context);
+
+    /**
+     * Lists all the connectors in a cluster.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return result of GET request to list connectors in the cluster of a confluent organization as paginated
+     * response with {@link PagedIterable}.
+     */
+    PagedIterable<ConnectorResource> list(String resourceGroupName, String organizationName, String environmentId,
+        String clusterId);
+
+    /**
+     * Lists all the connectors in a cluster.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param pageSize Pagination size.
+     * @param pageToken An opaque pagination token to fetch the next set of records.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return result of GET request to list connectors in the cluster of a confluent organization as paginated
+     * response with {@link PagedIterable}.
+     */
+    PagedIterable<ConnectorResource> list(String resourceGroupName, String organizationName, String environmentId,
+        String clusterId, Integer pageSize, String pageToken, Context context);
+
+    /**
+     * Get confluent connector by Name.
+     *
+     * @param id the resource ID.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent connector by Name.
+     */
+    ConnectorResource getById(String id);
+
+    /**
+     * Get confluent connector by Name.
+     *
+     * @param id the resource ID.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent connector by Name along with {@link Response}.
+     */
+    Response<ConnectorResource> getByIdWithResponse(String id, Context context);
+
+    /**
+     * Delete confluent connector by name.
+     *
+     * @param id the resource ID.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void deleteById(String id); + + /** + * Delete confluent connector by name. + * + * @param id the resource ID. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void deleteByIdWithResponse(String id, Context context); + + /** + * Begins definition for a new ConnectorResource resource. + * + * @param name resource name. + * @return the first stage of the new ConnectorResource definition. + */ + ConnectorResource.DefinitionStages.Blank define(String name); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/DataFormatType.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/DataFormatType.java new file mode 100644 index 000000000000..39177dd8a461 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/DataFormatType.java @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.ExpandableStringEnum; +import java.util.Collection; + +/** + * DataFormatType + * + * Data Format Type. + */ +public final class DataFormatType extends ExpandableStringEnum { + /** + * Static value AVRO for DataFormatType. + */ + public static final DataFormatType AVRO = fromString("AVRO"); + + /** + * Static value JSON for DataFormatType. + */ + public static final DataFormatType JSON = fromString("JSON"); + + /** + * Static value STRING for DataFormatType. + */ + public static final DataFormatType STRING = fromString("STRING"); + + /** + * Static value BYTES for DataFormatType. + */ + public static final DataFormatType BYTES = fromString("BYTES"); + + /** + * Static value PROTOBUF for DataFormatType. + */ + public static final DataFormatType PROTOBUF = fromString("PROTOBUF"); + + /** + * Creates a new instance of DataFormatType value. + * + * @deprecated Use the {@link #fromString(String)} factory method. + */ + @Deprecated + public DataFormatType() { + } + + /** + * Creates or finds a DataFormatType from its string representation. + * + * @param name a name to look for. + * @return the corresponding DataFormatType. + */ + public static DataFormatType fromString(String name) { + return fromString(name, DataFormatType.class); + } + + /** + * Gets known DataFormatType values. + * + * @return known DataFormatType values. 
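+     * <p>For example, a sink configuration might use {@code DataFormatType.AVRO} as its input format, or call
+     * {@code DataFormatType.fromString("JSON")} to obtain a value that is not listed as a constant.</p>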
+ */ + public static Collection values() { + return values(DataFormatType.class); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Environments.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Environments.java new file mode 100644 index 000000000000..198b64695668 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Environments.java @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.Context; + +/** + * Resource collection API of Environments. + */ +public interface Environments { + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void delete(String resourceGroupName, String organizationName, String environmentId); + + /** + * Delete confluent environment by id. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void delete(String resourceGroupName, String organizationName, String environmentId, Context context); + + /** + * Delete confluent environment by id. + * + * @param id the resource ID. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void deleteById(String id); + + /** + * Delete confluent environment by id. + * + * @param id the resource ID. + * @param context The context to associate with this operation. + * @throws IllegalArgumentException thrown if parameters fail the validation. + * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. + * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. + */ + void deleteByIdWithResponse(String id, Context context); + + /** + * Begins definition for a new SCEnvironmentRecord resource. + * + * @param name resource name. + * @return the first stage of the new SCEnvironmentRecord definition. 
+ */ + SCEnvironmentRecord.DefinitionStages.Blank define(String name); +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureBlobStorageSinkConnectorInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureBlobStorageSinkConnectorInfo.java new file mode 100644 index 000000000000..44a608ddaa72 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureBlobStorageSinkConnectorInfo.java @@ -0,0 +1,399 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.List; + +/** + * The partner connector type is KafkaAzureBlobStorageSink. + */ +@Fluent +public final class KafkaAzureBlobStorageSinkConnectorInfo extends PartnerInfoBase { + /* + * The partner connector type. + */ + private PartnerConnectorType partnerConnectorType = PartnerConnectorType.KAFKA_AZURE_BLOB_STORAGE_SINK; + + /* + * Kafka Auth Type + */ + private AuthType authType; + + /* + * Kafka Input Data Format Type + */ + private DataFormatType inputFormat; + + /* + * Kafka Output Data Format Type + */ + private DataFormatType outputFormat; + + /* + * Kafka API Key + */ + private String apiKey; + + /* + * Kafka API Key Secret + */ + private String apiSecret; + + /* + * Kafka Service Account Id + */ + private String serviceAccountId; + + /* + * Kafka topics list + */ + private List topics; + + /* + * Kafka topics directory + */ + private String topicsDir; + + /* + * Flush size + */ + private String flushSize; + + /* + * Maximum Tasks + */ + private String maxTasks; + + /* + * Time Interval + */ + private String timeInterval; + + /** + * Creates an instance of KafkaAzureBlobStorageSinkConnectorInfo class. + */ + public KafkaAzureBlobStorageSinkConnectorInfo() { + } + + /** + * Get the partnerConnectorType property: The partner connector type. + * + * @return the partnerConnectorType value. + */ + @Override + public PartnerConnectorType partnerConnectorType() { + return this.partnerConnectorType; + } + + /** + * Get the authType property: Kafka Auth Type. + * + * @return the authType value. + */ + public AuthType authType() { + return this.authType; + } + + /** + * Set the authType property: Kafka Auth Type. + * + * @param authType the authType value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withAuthType(AuthType authType) { + this.authType = authType; + return this; + } + + /** + * Get the inputFormat property: Kafka Input Data Format Type. + * + * @return the inputFormat value. + */ + public DataFormatType inputFormat() { + return this.inputFormat; + } + + /** + * Set the inputFormat property: Kafka Input Data Format Type. + * + * @param inputFormat the inputFormat value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withInputFormat(DataFormatType inputFormat) { + this.inputFormat = inputFormat; + return this; + } + + /** + * Get the outputFormat property: Kafka Output Data Format Type. 
+ * + * @return the outputFormat value. + */ + public DataFormatType outputFormat() { + return this.outputFormat; + } + + /** + * Set the outputFormat property: Kafka Output Data Format Type. + * + * @param outputFormat the outputFormat value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withOutputFormat(DataFormatType outputFormat) { + this.outputFormat = outputFormat; + return this; + } + + /** + * Get the apiKey property: Kafka API Key. + * + * @return the apiKey value. + */ + public String apiKey() { + return this.apiKey; + } + + /** + * Set the apiKey property: Kafka API Key. + * + * @param apiKey the apiKey value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withApiKey(String apiKey) { + this.apiKey = apiKey; + return this; + } + + /** + * Get the apiSecret property: Kafka API Key Secret. + * + * @return the apiSecret value. + */ + public String apiSecret() { + return this.apiSecret; + } + + /** + * Set the apiSecret property: Kafka API Key Secret. + * + * @param apiSecret the apiSecret value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withApiSecret(String apiSecret) { + this.apiSecret = apiSecret; + return this; + } + + /** + * Get the serviceAccountId property: Kafka Service Account Id. + * + * @return the serviceAccountId value. + */ + public String serviceAccountId() { + return this.serviceAccountId; + } + + /** + * Set the serviceAccountId property: Kafka Service Account Id. + * + * @param serviceAccountId the serviceAccountId value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withServiceAccountId(String serviceAccountId) { + this.serviceAccountId = serviceAccountId; + return this; + } + + /** + * Get the topics property: Kafka topics list. + * + * @return the topics value. + */ + public List topics() { + return this.topics; + } + + /** + * Set the topics property: Kafka topics list. + * + * @param topics the topics value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withTopics(List topics) { + this.topics = topics; + return this; + } + + /** + * Get the topicsDir property: Kafka topics directory. + * + * @return the topicsDir value. + */ + public String topicsDir() { + return this.topicsDir; + } + + /** + * Set the topicsDir property: Kafka topics directory. + * + * @param topicsDir the topicsDir value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withTopicsDir(String topicsDir) { + this.topicsDir = topicsDir; + return this; + } + + /** + * Get the flushSize property: Flush size. + * + * @return the flushSize value. + */ + public String flushSize() { + return this.flushSize; + } + + /** + * Set the flushSize property: Flush size. + * + * @param flushSize the flushSize value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withFlushSize(String flushSize) { + this.flushSize = flushSize; + return this; + } + + /** + * Get the maxTasks property: Maximum Tasks. + * + * @return the maxTasks value. 
+ */ + public String maxTasks() { + return this.maxTasks; + } + + /** + * Set the maxTasks property: Maximum Tasks. + * + * @param maxTasks the maxTasks value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withMaxTasks(String maxTasks) { + this.maxTasks = maxTasks; + return this; + } + + /** + * Get the timeInterval property: Time Interval. + * + * @return the timeInterval value. + */ + public String timeInterval() { + return this.timeInterval; + } + + /** + * Set the timeInterval property: Time Interval. + * + * @param timeInterval the timeInterval value to set. + * @return the KafkaAzureBlobStorageSinkConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSinkConnectorInfo withTimeInterval(String timeInterval) { + this.timeInterval = timeInterval; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("partnerConnectorType", + this.partnerConnectorType == null ? null : this.partnerConnectorType.toString()); + jsonWriter.writeStringField("authType", this.authType == null ? null : this.authType.toString()); + jsonWriter.writeStringField("inputFormat", this.inputFormat == null ? null : this.inputFormat.toString()); + jsonWriter.writeStringField("outputFormat", this.outputFormat == null ? null : this.outputFormat.toString()); + jsonWriter.writeStringField("apiKey", this.apiKey); + jsonWriter.writeStringField("apiSecret", this.apiSecret); + jsonWriter.writeStringField("serviceAccountId", this.serviceAccountId); + jsonWriter.writeArrayField("topics", this.topics, (writer, element) -> writer.writeString(element)); + jsonWriter.writeStringField("topicsDir", this.topicsDir); + jsonWriter.writeStringField("flushSize", this.flushSize); + jsonWriter.writeStringField("maxTasks", this.maxTasks); + jsonWriter.writeStringField("timeInterval", this.timeInterval); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of KafkaAzureBlobStorageSinkConnectorInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of KafkaAzureBlobStorageSinkConnectorInfo if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the KafkaAzureBlobStorageSinkConnectorInfo. 
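+     * <p>Illustrative sketch of populating this model before serializing it with {@link #toJson(JsonWriter)}
+     * (the credential, service account and topic values are placeholders, and the auth type string is an
+     * assumption; any value accepted by {@code AuthType.fromString} works):</p>
+     * <pre>{@code
+     * KafkaAzureBlobStorageSinkConnectorInfo info = new KafkaAzureBlobStorageSinkConnectorInfo()
+     *     .withAuthType(AuthType.fromString("KAFKA_API_KEY"))
+     *     .withInputFormat(DataFormatType.AVRO)
+     *     .withOutputFormat(DataFormatType.AVRO)
+     *     .withApiKey("my-api-key")
+     *     .withApiSecret("my-api-secret")
+     *     .withServiceAccountId("sa-123")
+     *     .withTopics(java.util.Arrays.asList("topic-1", "topic-2"))
+     *     .withTopicsDir("topics")
+     *     .withFlushSize("1000")
+     *     .withMaxTasks("1");
+     * }</pre>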
+ */ + public static KafkaAzureBlobStorageSinkConnectorInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + KafkaAzureBlobStorageSinkConnectorInfo deserializedKafkaAzureBlobStorageSinkConnectorInfo + = new KafkaAzureBlobStorageSinkConnectorInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partnerConnectorType".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.partnerConnectorType + = PartnerConnectorType.fromString(reader.getString()); + } else if ("authType".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.authType + = AuthType.fromString(reader.getString()); + } else if ("inputFormat".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.inputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("outputFormat".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.outputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("apiKey".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.apiKey = reader.getString(); + } else if ("apiSecret".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.apiSecret = reader.getString(); + } else if ("serviceAccountId".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.serviceAccountId = reader.getString(); + } else if ("topics".equals(fieldName)) { + List topics = reader.readArray(reader1 -> reader1.getString()); + deserializedKafkaAzureBlobStorageSinkConnectorInfo.topics = topics; + } else if ("topicsDir".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.topicsDir = reader.getString(); + } else if ("flushSize".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.flushSize = reader.getString(); + } else if ("maxTasks".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.maxTasks = reader.getString(); + } else if ("timeInterval".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSinkConnectorInfo.timeInterval = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedKafkaAzureBlobStorageSinkConnectorInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureBlobStorageSourceConnectorInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureBlobStorageSourceConnectorInfo.java new file mode 100644 index 000000000000..4813826034b2 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureBlobStorageSourceConnectorInfo.java @@ -0,0 +1,341 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The partner connector type is KafkaAzureBlobStorageSource. + */ +@Fluent +public final class KafkaAzureBlobStorageSourceConnectorInfo extends PartnerInfoBase { + /* + * The partner connector type. 
+ */ + private PartnerConnectorType partnerConnectorType = PartnerConnectorType.KAFKA_AZURE_BLOB_STORAGE_SOURCE; + + /* + * Kafka Auth Type + */ + private AuthType authType; + + /* + * Kafka Input Data Format Type + */ + private DataFormatType inputFormat; + + /* + * Kafka Output Data Format Type + */ + private DataFormatType outputFormat; + + /* + * Kafka API Key + */ + private String apiKey; + + /* + * Kafka API Secret + */ + private String apiSecret; + + /* + * Kafka Service Account Id + */ + private String serviceAccountId; + + /* + * Kafka topics Regex pattern + */ + private String topicRegex; + + /* + * Kafka topics directory + */ + private String topicsDir; + + /* + * Maximum Tasks + */ + private String maxTasks; + + /** + * Creates an instance of KafkaAzureBlobStorageSourceConnectorInfo class. + */ + public KafkaAzureBlobStorageSourceConnectorInfo() { + } + + /** + * Get the partnerConnectorType property: The partner connector type. + * + * @return the partnerConnectorType value. + */ + @Override + public PartnerConnectorType partnerConnectorType() { + return this.partnerConnectorType; + } + + /** + * Get the authType property: Kafka Auth Type. + * + * @return the authType value. + */ + public AuthType authType() { + return this.authType; + } + + /** + * Set the authType property: Kafka Auth Type. + * + * @param authType the authType value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withAuthType(AuthType authType) { + this.authType = authType; + return this; + } + + /** + * Get the inputFormat property: Kafka Input Data Format Type. + * + * @return the inputFormat value. + */ + public DataFormatType inputFormat() { + return this.inputFormat; + } + + /** + * Set the inputFormat property: Kafka Input Data Format Type. + * + * @param inputFormat the inputFormat value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withInputFormat(DataFormatType inputFormat) { + this.inputFormat = inputFormat; + return this; + } + + /** + * Get the outputFormat property: Kafka Output Data Format Type. + * + * @return the outputFormat value. + */ + public DataFormatType outputFormat() { + return this.outputFormat; + } + + /** + * Set the outputFormat property: Kafka Output Data Format Type. + * + * @param outputFormat the outputFormat value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withOutputFormat(DataFormatType outputFormat) { + this.outputFormat = outputFormat; + return this; + } + + /** + * Get the apiKey property: Kafka API Key. + * + * @return the apiKey value. + */ + public String apiKey() { + return this.apiKey; + } + + /** + * Set the apiKey property: Kafka API Key. + * + * @param apiKey the apiKey value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withApiKey(String apiKey) { + this.apiKey = apiKey; + return this; + } + + /** + * Get the apiSecret property: Kafka API Secret. + * + * @return the apiSecret value. + */ + public String apiSecret() { + return this.apiSecret; + } + + /** + * Set the apiSecret property: Kafka API Secret. + * + * @param apiSecret the apiSecret value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. 
+ */ + public KafkaAzureBlobStorageSourceConnectorInfo withApiSecret(String apiSecret) { + this.apiSecret = apiSecret; + return this; + } + + /** + * Get the serviceAccountId property: Kafka Service Account Id. + * + * @return the serviceAccountId value. + */ + public String serviceAccountId() { + return this.serviceAccountId; + } + + /** + * Set the serviceAccountId property: Kafka Service Account Id. + * + * @param serviceAccountId the serviceAccountId value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withServiceAccountId(String serviceAccountId) { + this.serviceAccountId = serviceAccountId; + return this; + } + + /** + * Get the topicRegex property: Kafka topics Regex pattern. + * + * @return the topicRegex value. + */ + public String topicRegex() { + return this.topicRegex; + } + + /** + * Set the topicRegex property: Kafka topics Regex pattern. + * + * @param topicRegex the topicRegex value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withTopicRegex(String topicRegex) { + this.topicRegex = topicRegex; + return this; + } + + /** + * Get the topicsDir property: Kafka topics directory. + * + * @return the topicsDir value. + */ + public String topicsDir() { + return this.topicsDir; + } + + /** + * Set the topicsDir property: Kafka topics directory. + * + * @param topicsDir the topicsDir value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withTopicsDir(String topicsDir) { + this.topicsDir = topicsDir; + return this; + } + + /** + * Get the maxTasks property: Maximum Tasks. + * + * @return the maxTasks value. + */ + public String maxTasks() { + return this.maxTasks; + } + + /** + * Set the maxTasks property: Maximum Tasks. + * + * @param maxTasks the maxTasks value to set. + * @return the KafkaAzureBlobStorageSourceConnectorInfo object itself. + */ + public KafkaAzureBlobStorageSourceConnectorInfo withMaxTasks(String maxTasks) { + this.maxTasks = maxTasks; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("partnerConnectorType", + this.partnerConnectorType == null ? null : this.partnerConnectorType.toString()); + jsonWriter.writeStringField("authType", this.authType == null ? null : this.authType.toString()); + jsonWriter.writeStringField("inputFormat", this.inputFormat == null ? null : this.inputFormat.toString()); + jsonWriter.writeStringField("outputFormat", this.outputFormat == null ? null : this.outputFormat.toString()); + jsonWriter.writeStringField("apiKey", this.apiKey); + jsonWriter.writeStringField("apiSecret", this.apiSecret); + jsonWriter.writeStringField("serviceAccountId", this.serviceAccountId); + jsonWriter.writeStringField("topicRegex", this.topicRegex); + jsonWriter.writeStringField("topicsDir", this.topicsDir); + jsonWriter.writeStringField("maxTasks", this.maxTasks); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of KafkaAzureBlobStorageSourceConnectorInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. 
+ * @return An instance of KafkaAzureBlobStorageSourceConnectorInfo if the JsonReader was pointing to an instance of + * it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the KafkaAzureBlobStorageSourceConnectorInfo. + */ + public static KafkaAzureBlobStorageSourceConnectorInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + KafkaAzureBlobStorageSourceConnectorInfo deserializedKafkaAzureBlobStorageSourceConnectorInfo + = new KafkaAzureBlobStorageSourceConnectorInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partnerConnectorType".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.partnerConnectorType + = PartnerConnectorType.fromString(reader.getString()); + } else if ("authType".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.authType + = AuthType.fromString(reader.getString()); + } else if ("inputFormat".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.inputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("outputFormat".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.outputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("apiKey".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.apiKey = reader.getString(); + } else if ("apiSecret".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.apiSecret = reader.getString(); + } else if ("serviceAccountId".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.serviceAccountId = reader.getString(); + } else if ("topicRegex".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.topicRegex = reader.getString(); + } else if ("topicsDir".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.topicsDir = reader.getString(); + } else if ("maxTasks".equals(fieldName)) { + deserializedKafkaAzureBlobStorageSourceConnectorInfo.maxTasks = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedKafkaAzureBlobStorageSourceConnectorInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureCosmosDBSinkConnectorInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureCosmosDBSinkConnectorInfo.java new file mode 100644 index 000000000000..5a7f312817c5 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureCosmosDBSinkConnectorInfo.java @@ -0,0 +1,398 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.List; + +/** + * The partner connector type is KafkaAzureCosmosDBSink. + */ +@Fluent +public final class KafkaAzureCosmosDBSinkConnectorInfo extends PartnerInfoBase { + /* + * The partner connector type. 
+ */ + private PartnerConnectorType partnerConnectorType = PartnerConnectorType.KAFKA_AZURE_COSMOS_DBSINK; + + /* + * Kafka Auth Type + */ + private AuthType authType; + + /* + * Kafka Input Data Format Type + */ + private DataFormatType inputFormat; + + /* + * Kafka Output Data Format Type + */ + private DataFormatType outputFormat; + + /* + * Kafka API Key + */ + private String apiKey; + + /* + * Kafka API Key Secret + */ + private String apiSecret; + + /* + * Kafka Service Account Id + */ + private String serviceAccountId; + + /* + * Kafka topics list + */ + private List topics; + + /* + * Kafka topics directory + */ + private String topicsDir; + + /* + * Flush size + */ + private String flushSize; + + /* + * Maximum Tasks + */ + private String maxTasks; + + /* + * Time Interval + */ + private String timeInterval; + + /** + * Creates an instance of KafkaAzureCosmosDBSinkConnectorInfo class. + */ + public KafkaAzureCosmosDBSinkConnectorInfo() { + } + + /** + * Get the partnerConnectorType property: The partner connector type. + * + * @return the partnerConnectorType value. + */ + @Override + public PartnerConnectorType partnerConnectorType() { + return this.partnerConnectorType; + } + + /** + * Get the authType property: Kafka Auth Type. + * + * @return the authType value. + */ + public AuthType authType() { + return this.authType; + } + + /** + * Set the authType property: Kafka Auth Type. + * + * @param authType the authType value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withAuthType(AuthType authType) { + this.authType = authType; + return this; + } + + /** + * Get the inputFormat property: Kafka Input Data Format Type. + * + * @return the inputFormat value. + */ + public DataFormatType inputFormat() { + return this.inputFormat; + } + + /** + * Set the inputFormat property: Kafka Input Data Format Type. + * + * @param inputFormat the inputFormat value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withInputFormat(DataFormatType inputFormat) { + this.inputFormat = inputFormat; + return this; + } + + /** + * Get the outputFormat property: Kafka Output Data Format Type. + * + * @return the outputFormat value. + */ + public DataFormatType outputFormat() { + return this.outputFormat; + } + + /** + * Set the outputFormat property: Kafka Output Data Format Type. + * + * @param outputFormat the outputFormat value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withOutputFormat(DataFormatType outputFormat) { + this.outputFormat = outputFormat; + return this; + } + + /** + * Get the apiKey property: Kafka API Key. + * + * @return the apiKey value. + */ + public String apiKey() { + return this.apiKey; + } + + /** + * Set the apiKey property: Kafka API Key. + * + * @param apiKey the apiKey value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withApiKey(String apiKey) { + this.apiKey = apiKey; + return this; + } + + /** + * Get the apiSecret property: Kafka API Key Secret. + * + * @return the apiSecret value. + */ + public String apiSecret() { + return this.apiSecret; + } + + /** + * Set the apiSecret property: Kafka API Key Secret. + * + * @param apiSecret the apiSecret value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. 
+ */ + public KafkaAzureCosmosDBSinkConnectorInfo withApiSecret(String apiSecret) { + this.apiSecret = apiSecret; + return this; + } + + /** + * Get the serviceAccountId property: Kafka Service Account Id. + * + * @return the serviceAccountId value. + */ + public String serviceAccountId() { + return this.serviceAccountId; + } + + /** + * Set the serviceAccountId property: Kafka Service Account Id. + * + * @param serviceAccountId the serviceAccountId value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withServiceAccountId(String serviceAccountId) { + this.serviceAccountId = serviceAccountId; + return this; + } + + /** + * Get the topics property: Kafka topics list. + * + * @return the topics value. + */ + public List topics() { + return this.topics; + } + + /** + * Set the topics property: Kafka topics list. + * + * @param topics the topics value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withTopics(List topics) { + this.topics = topics; + return this; + } + + /** + * Get the topicsDir property: Kafka topics directory. + * + * @return the topicsDir value. + */ + public String topicsDir() { + return this.topicsDir; + } + + /** + * Set the topicsDir property: Kafka topics directory. + * + * @param topicsDir the topicsDir value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withTopicsDir(String topicsDir) { + this.topicsDir = topicsDir; + return this; + } + + /** + * Get the flushSize property: Flush size. + * + * @return the flushSize value. + */ + public String flushSize() { + return this.flushSize; + } + + /** + * Set the flushSize property: Flush size. + * + * @param flushSize the flushSize value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withFlushSize(String flushSize) { + this.flushSize = flushSize; + return this; + } + + /** + * Get the maxTasks property: Maximum Tasks. + * + * @return the maxTasks value. + */ + public String maxTasks() { + return this.maxTasks; + } + + /** + * Set the maxTasks property: Maximum Tasks. + * + * @param maxTasks the maxTasks value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withMaxTasks(String maxTasks) { + this.maxTasks = maxTasks; + return this; + } + + /** + * Get the timeInterval property: Time Interval. + * + * @return the timeInterval value. + */ + public String timeInterval() { + return this.timeInterval; + } + + /** + * Set the timeInterval property: Time Interval. + * + * @param timeInterval the timeInterval value to set. + * @return the KafkaAzureCosmosDBSinkConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSinkConnectorInfo withTimeInterval(String timeInterval) { + this.timeInterval = timeInterval; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("partnerConnectorType", + this.partnerConnectorType == null ? null : this.partnerConnectorType.toString()); + jsonWriter.writeStringField("authType", this.authType == null ? 
null : this.authType.toString()); + jsonWriter.writeStringField("inputFormat", this.inputFormat == null ? null : this.inputFormat.toString()); + jsonWriter.writeStringField("outputFormat", this.outputFormat == null ? null : this.outputFormat.toString()); + jsonWriter.writeStringField("apiKey", this.apiKey); + jsonWriter.writeStringField("apiSecret", this.apiSecret); + jsonWriter.writeStringField("serviceAccountId", this.serviceAccountId); + jsonWriter.writeArrayField("topics", this.topics, (writer, element) -> writer.writeString(element)); + jsonWriter.writeStringField("topicsDir", this.topicsDir); + jsonWriter.writeStringField("flushSize", this.flushSize); + jsonWriter.writeStringField("maxTasks", this.maxTasks); + jsonWriter.writeStringField("timeInterval", this.timeInterval); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of KafkaAzureCosmosDBSinkConnectorInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of KafkaAzureCosmosDBSinkConnectorInfo if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the KafkaAzureCosmosDBSinkConnectorInfo. + */ + public static KafkaAzureCosmosDBSinkConnectorInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + KafkaAzureCosmosDBSinkConnectorInfo deserializedKafkaAzureCosmosDBSinkConnectorInfo + = new KafkaAzureCosmosDBSinkConnectorInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partnerConnectorType".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.partnerConnectorType + = PartnerConnectorType.fromString(reader.getString()); + } else if ("authType".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.authType = AuthType.fromString(reader.getString()); + } else if ("inputFormat".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.inputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("outputFormat".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.outputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("apiKey".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.apiKey = reader.getString(); + } else if ("apiSecret".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.apiSecret = reader.getString(); + } else if ("serviceAccountId".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.serviceAccountId = reader.getString(); + } else if ("topics".equals(fieldName)) { + List topics = reader.readArray(reader1 -> reader1.getString()); + deserializedKafkaAzureCosmosDBSinkConnectorInfo.topics = topics; + } else if ("topicsDir".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.topicsDir = reader.getString(); + } else if ("flushSize".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.flushSize = reader.getString(); + } else if ("maxTasks".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.maxTasks = reader.getString(); + } else if ("timeInterval".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSinkConnectorInfo.timeInterval = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedKafkaAzureCosmosDBSinkConnectorInfo; + }); + } +} diff --git 
a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureCosmosDBSourceConnectorInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureCosmosDBSourceConnectorInfo.java new file mode 100644 index 000000000000..2e7395929cce --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureCosmosDBSourceConnectorInfo.java @@ -0,0 +1,341 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The partner connector type is KafkaAzureCosmosDBSource. + */ +@Fluent +public final class KafkaAzureCosmosDBSourceConnectorInfo extends PartnerInfoBase { + /* + * The partner connector type. + */ + private PartnerConnectorType partnerConnectorType = PartnerConnectorType.KAFKA_AZURE_COSMOS_DBSOURCE; + + /* + * Kafka Auth Type + */ + private AuthType authType; + + /* + * Kafka Input Data Format Type + */ + private DataFormatType inputFormat; + + /* + * Kafka Output Data Format Type + */ + private DataFormatType outputFormat; + + /* + * Kafka API Key + */ + private String apiKey; + + /* + * Kafka API Secret + */ + private String apiSecret; + + /* + * Kafka Service Account Id + */ + private String serviceAccountId; + + /* + * Kafka topics Regex pattern + */ + private String topicRegex; + + /* + * Kafka topics directory + */ + private String topicsDir; + + /* + * Maximum Tasks + */ + private String maxTasks; + + /** + * Creates an instance of KafkaAzureCosmosDBSourceConnectorInfo class. + */ + public KafkaAzureCosmosDBSourceConnectorInfo() { + } + + /** + * Get the partnerConnectorType property: The partner connector type. + * + * @return the partnerConnectorType value. + */ + @Override + public PartnerConnectorType partnerConnectorType() { + return this.partnerConnectorType; + } + + /** + * Get the authType property: Kafka Auth Type. + * + * @return the authType value. + */ + public AuthType authType() { + return this.authType; + } + + /** + * Set the authType property: Kafka Auth Type. + * + * @param authType the authType value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withAuthType(AuthType authType) { + this.authType = authType; + return this; + } + + /** + * Get the inputFormat property: Kafka Input Data Format Type. + * + * @return the inputFormat value. + */ + public DataFormatType inputFormat() { + return this.inputFormat; + } + + /** + * Set the inputFormat property: Kafka Input Data Format Type. + * + * @param inputFormat the inputFormat value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withInputFormat(DataFormatType inputFormat) { + this.inputFormat = inputFormat; + return this; + } + + /** + * Get the outputFormat property: Kafka Output Data Format Type. + * + * @return the outputFormat value. + */ + public DataFormatType outputFormat() { + return this.outputFormat; + } + + /** + * Set the outputFormat property: Kafka Output Data Format Type. 
+ * + * @param outputFormat the outputFormat value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withOutputFormat(DataFormatType outputFormat) { + this.outputFormat = outputFormat; + return this; + } + + /** + * Get the apiKey property: Kafka API Key. + * + * @return the apiKey value. + */ + public String apiKey() { + return this.apiKey; + } + + /** + * Set the apiKey property: Kafka API Key. + * + * @param apiKey the apiKey value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withApiKey(String apiKey) { + this.apiKey = apiKey; + return this; + } + + /** + * Get the apiSecret property: Kafka API Secret. + * + * @return the apiSecret value. + */ + public String apiSecret() { + return this.apiSecret; + } + + /** + * Set the apiSecret property: Kafka API Secret. + * + * @param apiSecret the apiSecret value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withApiSecret(String apiSecret) { + this.apiSecret = apiSecret; + return this; + } + + /** + * Get the serviceAccountId property: Kafka Service Account Id. + * + * @return the serviceAccountId value. + */ + public String serviceAccountId() { + return this.serviceAccountId; + } + + /** + * Set the serviceAccountId property: Kafka Service Account Id. + * + * @param serviceAccountId the serviceAccountId value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withServiceAccountId(String serviceAccountId) { + this.serviceAccountId = serviceAccountId; + return this; + } + + /** + * Get the topicRegex property: Kafka topics Regex pattern. + * + * @return the topicRegex value. + */ + public String topicRegex() { + return this.topicRegex; + } + + /** + * Set the topicRegex property: Kafka topics Regex pattern. + * + * @param topicRegex the topicRegex value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withTopicRegex(String topicRegex) { + this.topicRegex = topicRegex; + return this; + } + + /** + * Get the topicsDir property: Kafka topics directory. + * + * @return the topicsDir value. + */ + public String topicsDir() { + return this.topicsDir; + } + + /** + * Set the topicsDir property: Kafka topics directory. + * + * @param topicsDir the topicsDir value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withTopicsDir(String topicsDir) { + this.topicsDir = topicsDir; + return this; + } + + /** + * Get the maxTasks property: Maximum Tasks. + * + * @return the maxTasks value. + */ + public String maxTasks() { + return this.maxTasks; + } + + /** + * Set the maxTasks property: Maximum Tasks. + * + * @param maxTasks the maxTasks value to set. + * @return the KafkaAzureCosmosDBSourceConnectorInfo object itself. + */ + public KafkaAzureCosmosDBSourceConnectorInfo withMaxTasks(String maxTasks) { + this.maxTasks = maxTasks; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("partnerConnectorType", + this.partnerConnectorType == null ? null : this.partnerConnectorType.toString()); + jsonWriter.writeStringField("authType", this.authType == null ? null : this.authType.toString()); + jsonWriter.writeStringField("inputFormat", this.inputFormat == null ? null : this.inputFormat.toString()); + jsonWriter.writeStringField("outputFormat", this.outputFormat == null ? null : this.outputFormat.toString()); + jsonWriter.writeStringField("apiKey", this.apiKey); + jsonWriter.writeStringField("apiSecret", this.apiSecret); + jsonWriter.writeStringField("serviceAccountId", this.serviceAccountId); + jsonWriter.writeStringField("topicRegex", this.topicRegex); + jsonWriter.writeStringField("topicsDir", this.topicsDir); + jsonWriter.writeStringField("maxTasks", this.maxTasks); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of KafkaAzureCosmosDBSourceConnectorInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of KafkaAzureCosmosDBSourceConnectorInfo if the JsonReader was pointing to an instance of it, + * or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the KafkaAzureCosmosDBSourceConnectorInfo. + */ + public static KafkaAzureCosmosDBSourceConnectorInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + KafkaAzureCosmosDBSourceConnectorInfo deserializedKafkaAzureCosmosDBSourceConnectorInfo + = new KafkaAzureCosmosDBSourceConnectorInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partnerConnectorType".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.partnerConnectorType + = PartnerConnectorType.fromString(reader.getString()); + } else if ("authType".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.authType + = AuthType.fromString(reader.getString()); + } else if ("inputFormat".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.inputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("outputFormat".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.outputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("apiKey".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.apiKey = reader.getString(); + } else if ("apiSecret".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.apiSecret = reader.getString(); + } else if ("serviceAccountId".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.serviceAccountId = reader.getString(); + } else if ("topicRegex".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.topicRegex = reader.getString(); + } else if ("topicsDir".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.topicsDir = reader.getString(); + } else if ("maxTasks".equals(fieldName)) { + deserializedKafkaAzureCosmosDBSourceConnectorInfo.maxTasks = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedKafkaAzureCosmosDBSourceConnectorInfo; + }); + } +} diff --git 
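
The source-connector counterpart differs mainly in selecting topics by regex instead of an explicit list. A short sketch, again with placeholder values only:

```java
import com.azure.resourcemanager.confluent.models.AuthType;
import com.azure.resourcemanager.confluent.models.DataFormatType;
import com.azure.resourcemanager.confluent.models.KafkaAzureCosmosDBSourceConnectorInfo;

public final class CosmosDbSourceConnectorInfoSample {
    public static void main(String[] args) {
        // Same fluent pattern as the sink variant, but the source connector
        // matches topics with a regex rather than an explicit topic list.
        KafkaAzureCosmosDBSourceConnectorInfo info = new KafkaAzureCosmosDBSourceConnectorInfo()
            .withAuthType(AuthType.fromString("SERVICE_ACCOUNT")) // placeholder auth mode
            .withServiceAccountId("sa-12345")
            .withInputFormat(DataFormatType.fromString("AVRO"))   // placeholder format
            .withOutputFormat(DataFormatType.fromString("AVRO"))
            .withTopicRegex("orders\\..*")
            .withTopicsDir("topics")
            .withMaxTasks("2");

        // The discriminator is fixed by the subtype itself.
        System.out.println(info.partnerConnectorType()); // KafkaAzureCosmosDBSource
    }
}
```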
a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureSynapseAnalyticsSinkConnectorInfo.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureSynapseAnalyticsSinkConnectorInfo.java new file mode 100644 index 000000000000..20b1365fffc4 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/KafkaAzureSynapseAnalyticsSinkConnectorInfo.java @@ -0,0 +1,399 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; +import java.util.List; + +/** + * The partner connector type is KafkaAzureSynapseAnalyticsSink. + */ +@Fluent +public final class KafkaAzureSynapseAnalyticsSinkConnectorInfo extends PartnerInfoBase { + /* + * The partner connector type. + */ + private PartnerConnectorType partnerConnectorType = PartnerConnectorType.KAFKA_AZURE_SYNAPSE_ANALYTICS_SINK; + + /* + * Kafka Auth Type + */ + private AuthType authType; + + /* + * Kafka Input Data Format Type + */ + private DataFormatType inputFormat; + + /* + * Kafka Output Data Format Type + */ + private DataFormatType outputFormat; + + /* + * Kafka API Key + */ + private String apiKey; + + /* + * Kafka API Key Secret + */ + private String apiSecret; + + /* + * Kafka Service Account Id + */ + private String serviceAccountId; + + /* + * Kafka topics list + */ + private List topics; + + /* + * Kafka topics directory + */ + private String topicsDir; + + /* + * Flush size + */ + private String flushSize; + + /* + * Maximum Tasks + */ + private String maxTasks; + + /* + * Time Interval + */ + private String timeInterval; + + /** + * Creates an instance of KafkaAzureSynapseAnalyticsSinkConnectorInfo class. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo() { + } + + /** + * Get the partnerConnectorType property: The partner connector type. + * + * @return the partnerConnectorType value. + */ + @Override + public PartnerConnectorType partnerConnectorType() { + return this.partnerConnectorType; + } + + /** + * Get the authType property: Kafka Auth Type. + * + * @return the authType value. + */ + public AuthType authType() { + return this.authType; + } + + /** + * Set the authType property: Kafka Auth Type. + * + * @param authType the authType value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withAuthType(AuthType authType) { + this.authType = authType; + return this; + } + + /** + * Get the inputFormat property: Kafka Input Data Format Type. + * + * @return the inputFormat value. + */ + public DataFormatType inputFormat() { + return this.inputFormat; + } + + /** + * Set the inputFormat property: Kafka Input Data Format Type. + * + * @param inputFormat the inputFormat value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withInputFormat(DataFormatType inputFormat) { + this.inputFormat = inputFormat; + return this; + } + + /** + * Get the outputFormat property: Kafka Output Data Format Type. + * + * @return the outputFormat value. 
+ */ + public DataFormatType outputFormat() { + return this.outputFormat; + } + + /** + * Set the outputFormat property: Kafka Output Data Format Type. + * + * @param outputFormat the outputFormat value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withOutputFormat(DataFormatType outputFormat) { + this.outputFormat = outputFormat; + return this; + } + + /** + * Get the apiKey property: Kafka API Key. + * + * @return the apiKey value. + */ + public String apiKey() { + return this.apiKey; + } + + /** + * Set the apiKey property: Kafka API Key. + * + * @param apiKey the apiKey value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withApiKey(String apiKey) { + this.apiKey = apiKey; + return this; + } + + /** + * Get the apiSecret property: Kafka API Key Secret. + * + * @return the apiSecret value. + */ + public String apiSecret() { + return this.apiSecret; + } + + /** + * Set the apiSecret property: Kafka API Key Secret. + * + * @param apiSecret the apiSecret value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withApiSecret(String apiSecret) { + this.apiSecret = apiSecret; + return this; + } + + /** + * Get the serviceAccountId property: Kafka Service Account Id. + * + * @return the serviceAccountId value. + */ + public String serviceAccountId() { + return this.serviceAccountId; + } + + /** + * Set the serviceAccountId property: Kafka Service Account Id. + * + * @param serviceAccountId the serviceAccountId value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withServiceAccountId(String serviceAccountId) { + this.serviceAccountId = serviceAccountId; + return this; + } + + /** + * Get the topics property: Kafka topics list. + * + * @return the topics value. + */ + public List topics() { + return this.topics; + } + + /** + * Set the topics property: Kafka topics list. + * + * @param topics the topics value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withTopics(List topics) { + this.topics = topics; + return this; + } + + /** + * Get the topicsDir property: Kafka topics directory. + * + * @return the topicsDir value. + */ + public String topicsDir() { + return this.topicsDir; + } + + /** + * Set the topicsDir property: Kafka topics directory. + * + * @param topicsDir the topicsDir value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withTopicsDir(String topicsDir) { + this.topicsDir = topicsDir; + return this; + } + + /** + * Get the flushSize property: Flush size. + * + * @return the flushSize value. + */ + public String flushSize() { + return this.flushSize; + } + + /** + * Set the flushSize property: Flush size. + * + * @param flushSize the flushSize value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withFlushSize(String flushSize) { + this.flushSize = flushSize; + return this; + } + + /** + * Get the maxTasks property: Maximum Tasks. + * + * @return the maxTasks value. 
+ */ + public String maxTasks() { + return this.maxTasks; + } + + /** + * Set the maxTasks property: Maximum Tasks. + * + * @param maxTasks the maxTasks value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withMaxTasks(String maxTasks) { + this.maxTasks = maxTasks; + return this; + } + + /** + * Get the timeInterval property: Time Interval. + * + * @return the timeInterval value. + */ + public String timeInterval() { + return this.timeInterval; + } + + /** + * Set the timeInterval property: Time Interval. + * + * @param timeInterval the timeInterval value to set. + * @return the KafkaAzureSynapseAnalyticsSinkConnectorInfo object itself. + */ + public KafkaAzureSynapseAnalyticsSinkConnectorInfo withTimeInterval(String timeInterval) { + this.timeInterval = timeInterval; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + @Override + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("partnerConnectorType", + this.partnerConnectorType == null ? null : this.partnerConnectorType.toString()); + jsonWriter.writeStringField("authType", this.authType == null ? null : this.authType.toString()); + jsonWriter.writeStringField("inputFormat", this.inputFormat == null ? null : this.inputFormat.toString()); + jsonWriter.writeStringField("outputFormat", this.outputFormat == null ? null : this.outputFormat.toString()); + jsonWriter.writeStringField("apiKey", this.apiKey); + jsonWriter.writeStringField("apiSecret", this.apiSecret); + jsonWriter.writeStringField("serviceAccountId", this.serviceAccountId); + jsonWriter.writeArrayField("topics", this.topics, (writer, element) -> writer.writeString(element)); + jsonWriter.writeStringField("topicsDir", this.topicsDir); + jsonWriter.writeStringField("flushSize", this.flushSize); + jsonWriter.writeStringField("maxTasks", this.maxTasks); + jsonWriter.writeStringField("timeInterval", this.timeInterval); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of KafkaAzureSynapseAnalyticsSinkConnectorInfo from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of KafkaAzureSynapseAnalyticsSinkConnectorInfo if the JsonReader was pointing to an instance + * of it, or null if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the KafkaAzureSynapseAnalyticsSinkConnectorInfo. 
+ */ + public static KafkaAzureSynapseAnalyticsSinkConnectorInfo fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + KafkaAzureSynapseAnalyticsSinkConnectorInfo deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo + = new KafkaAzureSynapseAnalyticsSinkConnectorInfo(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partnerConnectorType".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.partnerConnectorType + = PartnerConnectorType.fromString(reader.getString()); + } else if ("authType".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.authType + = AuthType.fromString(reader.getString()); + } else if ("inputFormat".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.inputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("outputFormat".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.outputFormat + = DataFormatType.fromString(reader.getString()); + } else if ("apiKey".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.apiKey = reader.getString(); + } else if ("apiSecret".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.apiSecret = reader.getString(); + } else if ("serviceAccountId".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.serviceAccountId = reader.getString(); + } else if ("topics".equals(fieldName)) { + List topics = reader.readArray(reader1 -> reader1.getString()); + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.topics = topics; + } else if ("topicsDir".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.topicsDir = reader.getString(); + } else if ("flushSize".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.flushSize = reader.getString(); + } else if ("maxTasks".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.maxTasks = reader.getString(); + } else if ("timeInterval".equals(fieldName)) { + deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo.timeInterval = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedKafkaAzureSynapseAnalyticsSinkConnectorInfo; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ListConnectorsSuccessResponse.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ListConnectorsSuccessResponse.java new file mode 100644 index 000000000000..43c668c39b49 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ListConnectorsSuccessResponse.java @@ -0,0 +1,129 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import com.azure.resourcemanager.confluent.fluent.models.ConnectorResourceInner; +import java.io.IOException; +import java.util.List; + +/** + * Result of GET request to list connectors in the cluster of a confluent organization. 
+ */ +@Fluent +public final class ListConnectorsSuccessResponse implements JsonSerializable { + /* + * List of connectors in a cluster of a confluent organization + */ + private List value; + + /* + * URL to get the next set of connectors records if there are any. + */ + private String nextLink; + + /** + * Creates an instance of ListConnectorsSuccessResponse class. + */ + public ListConnectorsSuccessResponse() { + } + + /** + * Get the value property: List of connectors in a cluster of a confluent organization. + * + * @return the value value. + */ + public List value() { + return this.value; + } + + /** + * Set the value property: List of connectors in a cluster of a confluent organization. + * + * @param value the value value to set. + * @return the ListConnectorsSuccessResponse object itself. + */ + public ListConnectorsSuccessResponse withValue(List value) { + this.value = value; + return this; + } + + /** + * Get the nextLink property: URL to get the next set of connectors records if there are any. + * + * @return the nextLink value. + */ + public String nextLink() { + return this.nextLink; + } + + /** + * Set the nextLink property: URL to get the next set of connectors records if there are any. + * + * @param nextLink the nextLink value to set. + * @return the ListConnectorsSuccessResponse object itself. + */ + public ListConnectorsSuccessResponse withNextLink(String nextLink) { + this.nextLink = nextLink; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + if (value() != null) { + value().forEach(e -> e.validate()); + } + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ListConnectorsSuccessResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ListConnectorsSuccessResponse if the JsonReader was pointing to an instance of it, or null + * if it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ListConnectorsSuccessResponse. 
+ */ + public static ListConnectorsSuccessResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ListConnectorsSuccessResponse deserializedListConnectorsSuccessResponse + = new ListConnectorsSuccessResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value + = reader.readArray(reader1 -> ConnectorResourceInner.fromJson(reader1)); + deserializedListConnectorsSuccessResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedListConnectorsSuccessResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedListConnectorsSuccessResponse; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ListTopicsSuccessResponse.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ListTopicsSuccessResponse.java new file mode 100644 index 000000000000..a6498a62b3e9 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/ListTopicsSuccessResponse.java @@ -0,0 +1,127 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner; +import java.io.IOException; +import java.util.List; + +/** + * Result of GET request to list topics in the cluster of a confluent organization. + */ +@Fluent +public final class ListTopicsSuccessResponse implements JsonSerializable { + /* + * List of topics in a cluster of a confluent organization + */ + private List value; + + /* + * URL to get the next set of topics records if there are any. + */ + private String nextLink; + + /** + * Creates an instance of ListTopicsSuccessResponse class. + */ + public ListTopicsSuccessResponse() { + } + + /** + * Get the value property: List of topics in a cluster of a confluent organization. + * + * @return the value value. + */ + public List value() { + return this.value; + } + + /** + * Set the value property: List of topics in a cluster of a confluent organization. + * + * @param value the value value to set. + * @return the ListTopicsSuccessResponse object itself. + */ + public ListTopicsSuccessResponse withValue(List value) { + this.value = value; + return this; + } + + /** + * Get the nextLink property: URL to get the next set of topics records if there are any. + * + * @return the nextLink value. + */ + public String nextLink() { + return this.nextLink; + } + + /** + * Set the nextLink property: URL to get the next set of topics records if there are any. + * + * @param nextLink the nextLink value to set. + * @return the ListTopicsSuccessResponse object itself. + */ + public ListTopicsSuccessResponse withNextLink(String nextLink) { + this.nextLink = nextLink; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
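
Both list-response models above share the same `value`/`nextLink` shape. A hedged sketch of parsing one raw page of the connectors variant; the payload is made up, and in normal use the `Connectors` client walks `nextLink` for you through `PagedIterable`:

```java
import com.azure.json.JsonProviders;
import com.azure.json.JsonReader;
import com.azure.resourcemanager.confluent.models.ListConnectorsSuccessResponse;
import java.io.IOException;

public final class ListConnectorsPageSample {
    public static void main(String[] args) throws IOException {
        // A made-up page body in the shape this model deserializes: a "value" array
        // of connector records plus an optional "nextLink" for the following page.
        String page = "{\"value\":[{\"name\":\"connector-1\"},{\"name\":\"connector-2\"}],"
            + "\"nextLink\":\"https://example.invalid/connectors?pageToken=abc\"}";

        try (JsonReader reader = JsonProviders.createReader(page)) {
            ListConnectorsSuccessResponse response = ListConnectorsSuccessResponse.fromJson(reader);
            System.out.println("connectors on this page: " + response.value().size());
            if (response.nextLink() != null) {
                System.out.println("more pages at: " + response.nextLink());
            }
        }
    }
}
```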
+ */ + public void validate() { + if (value() != null) { + value().forEach(e -> e.validate()); + } + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeArrayField("value", this.value, (writer, element) -> writer.writeJson(element)); + jsonWriter.writeStringField("nextLink", this.nextLink); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of ListTopicsSuccessResponse from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of ListTopicsSuccessResponse if the JsonReader was pointing to an instance of it, or null if + * it was pointing to JSON null. + * @throws IOException If an error occurs while reading the ListTopicsSuccessResponse. + */ + public static ListTopicsSuccessResponse fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + ListTopicsSuccessResponse deserializedListTopicsSuccessResponse = new ListTopicsSuccessResponse(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("value".equals(fieldName)) { + List value = reader.readArray(reader1 -> TopicRecordInner.fromJson(reader1)); + deserializedListTopicsSuccessResponse.value = value; + } else if ("nextLink".equals(fieldName)) { + deserializedListTopicsSuccessResponse.nextLink = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedListTopicsSuccessResponse; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Package.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Package.java new file mode 100644 index 000000000000..55d293dbb9ce --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Package.java @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.ExpandableStringEnum; +import java.util.Collection; + +/** + * Stream Governance Package. Supported values are ESSENTIALS and ADVANCED. + */ +public final class Package extends ExpandableStringEnum { + /** + * Static value ESSENTIALS for Package. + */ + public static final Package ESSENTIALS = fromString("ESSENTIALS"); + + /** + * Static value ADVANCED for Package. + */ + public static final Package ADVANCED = fromString("ADVANCED"); + + /** + * Creates a new instance of Package value. + * + * @deprecated Use the {@link #fromString(String)} factory method. + */ + @Deprecated + public Package() { + } + + /** + * Creates or finds a Package from its string representation. + * + * @param name a name to look for. + * @return the corresponding Package. + */ + public static Package fromString(String name) { + return fromString(name, Package.class); + } + + /** + * Gets known Package values. + * + * @return known Package values. 
+ */ + public static Collection values() { + return values(Package.class); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/PartnerConnectorType.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/PartnerConnectorType.java new file mode 100644 index 000000000000..7197840a6b17 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/PartnerConnectorType.java @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.ExpandableStringEnum; +import java.util.Collection; + +/** + * Partner Connector type. + */ +public final class PartnerConnectorType extends ExpandableStringEnum { + /** + * Static value KafkaAzureBlobStorageSource for PartnerConnectorType. + */ + public static final PartnerConnectorType KAFKA_AZURE_BLOB_STORAGE_SOURCE + = fromString("KafkaAzureBlobStorageSource"); + + /** + * Static value KafkaAzureBlobStorageSink for PartnerConnectorType. + */ + public static final PartnerConnectorType KAFKA_AZURE_BLOB_STORAGE_SINK = fromString("KafkaAzureBlobStorageSink"); + + /** + * Static value KafkaAzureCosmosDBSource for PartnerConnectorType. + */ + public static final PartnerConnectorType KAFKA_AZURE_COSMOS_DBSOURCE = fromString("KafkaAzureCosmosDBSource"); + + /** + * Static value KafkaAzureCosmosDBSink for PartnerConnectorType. + */ + public static final PartnerConnectorType KAFKA_AZURE_COSMOS_DBSINK = fromString("KafkaAzureCosmosDBSink"); + + /** + * Static value KafkaAzureSynapseAnalyticsSink for PartnerConnectorType. + */ + public static final PartnerConnectorType KAFKA_AZURE_SYNAPSE_ANALYTICS_SINK + = fromString("KafkaAzureSynapseAnalyticsSink"); + + /** + * Creates a new instance of PartnerConnectorType value. + * + * @deprecated Use the {@link #fromString(String)} factory method. + */ + @Deprecated + public PartnerConnectorType() { + } + + /** + * Creates or finds a PartnerConnectorType from its string representation. + * + * @param name a name to look for. + * @return the corresponding PartnerConnectorType. + */ + public static PartnerConnectorType fromString(String name) { + return fromString(name, PartnerConnectorType.class); + } + + /** + * Gets known PartnerConnectorType values. + * + * @return known PartnerConnectorType values. + */ + public static Collection values() { + return values(PartnerConnectorType.class); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/PartnerInfoBase.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/PartnerInfoBase.java new file mode 100644 index 000000000000..ff90b20d7680 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/PartnerInfoBase.java @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. 
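
`Package` and `PartnerConnectorType` are expandable string enums rather than Java enums, so `fromString` also accepts values the SDK does not yet define and deserialization never fails on an unrecognized name. A small sketch; the `PREMIUM` value is hypothetical and only illustrates that open-ended behavior:

```java
import com.azure.resourcemanager.confluent.models.Package;
import com.azure.resourcemanager.confluent.models.PartnerConnectorType;

public final class ExpandableEnumSample {
    public static void main(String[] args) {
        // Known values are exposed as constants...
        Package advanced = Package.ADVANCED;

        // ...but fromString also accepts values the service may add later.
        Package future = Package.fromString("PREMIUM"); // hypothetical, not defined by this SDK

        System.out.println(advanced);                              // ADVANCED
        System.out.println(future);                                // PREMIUM
        System.out.println(PartnerConnectorType.values().size());  // all values seen so far
    }
}
```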
+ +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Immutable; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * The partner info base. + */ +@Immutable +public class PartnerInfoBase implements JsonSerializable { + /* + * The partner connector type. + */ + private PartnerConnectorType partnerConnectorType = PartnerConnectorType.fromString("PartnerInfoBase"); + + /** + * Creates an instance of PartnerInfoBase class. + */ + public PartnerInfoBase() { + } + + /** + * Get the partnerConnectorType property: The partner connector type. + * + * @return the partnerConnectorType value. + */ + public PartnerConnectorType partnerConnectorType() { + return this.partnerConnectorType; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("partnerConnectorType", + this.partnerConnectorType == null ? null : this.partnerConnectorType.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of PartnerInfoBase from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of PartnerInfoBase if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the PartnerInfoBase. + */ + public static PartnerInfoBase fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + String discriminatorValue = null; + try (JsonReader readerToUse = reader.bufferObject()) { + readerToUse.nextToken(); // Prepare for reading + while (readerToUse.nextToken() != JsonToken.END_OBJECT) { + String fieldName = readerToUse.getFieldName(); + readerToUse.nextToken(); + if ("partnerConnectorType".equals(fieldName)) { + discriminatorValue = readerToUse.getString(); + break; + } else { + readerToUse.skipChildren(); + } + } + // Use the discriminator value to determine which subtype should be deserialized. 
+ if ("KafkaAzureBlobStorageSink".equals(discriminatorValue)) { + return KafkaAzureBlobStorageSinkConnectorInfo.fromJson(readerToUse.reset()); + } else if ("KafkaAzureBlobStorageSource".equals(discriminatorValue)) { + return KafkaAzureBlobStorageSourceConnectorInfo.fromJson(readerToUse.reset()); + } else if ("KafkaAzureCosmosDBSink".equals(discriminatorValue)) { + return KafkaAzureCosmosDBSinkConnectorInfo.fromJson(readerToUse.reset()); + } else if ("KafkaAzureCosmosDBSource".equals(discriminatorValue)) { + return KafkaAzureCosmosDBSourceConnectorInfo.fromJson(readerToUse.reset()); + } else if ("KafkaAzureSynapseAnalyticsSink".equals(discriminatorValue)) { + return KafkaAzureSynapseAnalyticsSinkConnectorInfo.fromJson(readerToUse.reset()); + } else { + return fromJsonKnownDiscriminator(readerToUse.reset()); + } + } + }); + } + + static PartnerInfoBase fromJsonKnownDiscriminator(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + PartnerInfoBase deserializedPartnerInfoBase = new PartnerInfoBase(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("partnerConnectorType".equals(fieldName)) { + deserializedPartnerInfoBase.partnerConnectorType + = PartnerConnectorType.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedPartnerInfoBase; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterRecord.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterRecord.java index 1824b1ba6ed7..c7a97bc0e865 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterRecord.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterRecord.java @@ -4,6 +4,7 @@ package com.azure.resourcemanager.confluent.models; +import com.azure.core.util.Context; import com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner; /** @@ -24,6 +25,13 @@ public interface SCClusterRecord { */ String id(); + /** + * Gets the type property: Type of the resource. + * + * @return the type value. + */ + String type(); + /** * Gets the name property: Display name of the cluster. * @@ -52,10 +60,261 @@ public interface SCClusterRecord { */ ClusterStatusEntity status(); + /** + * Gets the name of the resource group. + * + * @return the name of the resource group. + */ + String resourceGroupName(); + /** * Gets the inner com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner object. * * @return the inner object. */ SCClusterRecordInner innerModel(); + + /** + * The entirety of the SCClusterRecord definition. + */ + interface Definition + extends DefinitionStages.Blank, DefinitionStages.WithParentResource, DefinitionStages.WithCreate { + } + + /** + * The SCClusterRecord definition stages. + */ + interface DefinitionStages { + /** + * The first stage of the SCClusterRecord definition. + */ + interface Blank extends WithParentResource { + } + + /** + * The stage of the SCClusterRecord definition allowing to specify parent resource. + */ + interface WithParentResource { + /** + * Specifies resourceGroupName, organizationName, environmentId. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. 
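
As the deserializer above shows, `PartnerInfoBase.fromJson` buffers the object, peeks at the `partnerConnectorType` discriminator, and replays the buffered JSON into the matching subtype. A hedged sketch with an illustrative payload:

```java
import com.azure.json.JsonProviders;
import com.azure.json.JsonReader;
import com.azure.resourcemanager.confluent.models.KafkaAzureSynapseAnalyticsSinkConnectorInfo;
import com.azure.resourcemanager.confluent.models.PartnerInfoBase;
import java.io.IOException;

public final class PartnerInfoDispatchSample {
    public static void main(String[] args) throws IOException {
        // The "partnerConnectorType" discriminator selects the concrete subtype.
        String json = "{\"partnerConnectorType\":\"KafkaAzureSynapseAnalyticsSink\","
            + "\"maxTasks\":\"4\",\"topicsDir\":\"topics\"}";

        try (JsonReader reader = JsonProviders.createReader(json)) {
            PartnerInfoBase info = PartnerInfoBase.fromJson(reader);
            if (info instanceof KafkaAzureSynapseAnalyticsSinkConnectorInfo) {
                KafkaAzureSynapseAnalyticsSinkConnectorInfo sink =
                    (KafkaAzureSynapseAnalyticsSinkConnectorInfo) info;
                System.out.println(sink.maxTasks()); // 4
            }
        }
    }
}
```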
+ * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @return the next definition stage. + */ + WithCreate withExistingEnvironment(String resourceGroupName, String organizationName, String environmentId); + } + + /** + * The stage of the SCClusterRecord definition which contains all the minimum required properties for the + * resource to be created, but also allows for any other optional properties to be specified. + */ + interface WithCreate extends DefinitionStages.WithKind, DefinitionStages.WithType, DefinitionStages.WithName, + DefinitionStages.WithMetadata, DefinitionStages.WithSpec, DefinitionStages.WithStatus { + /** + * Executes the create request. + * + * @return the created resource. + */ + SCClusterRecord create(); + + /** + * Executes the create request. + * + * @param context The context to associate with this operation. + * @return the created resource. + */ + SCClusterRecord create(Context context); + } + + /** + * The stage of the SCClusterRecord definition allowing to specify kind. + */ + interface WithKind { + /** + * Specifies the kind property: Type of cluster. + * + * @param kind Type of cluster. + * @return the next definition stage. + */ + WithCreate withKind(String kind); + } + + /** + * The stage of the SCClusterRecord definition allowing to specify type. + */ + interface WithType { + /** + * Specifies the type property: Type of the resource. + * + * @param type Type of the resource. + * @return the next definition stage. + */ + WithCreate withType(String type); + } + + /** + * The stage of the SCClusterRecord definition allowing to specify name. + */ + interface WithName { + /** + * Specifies the name property: Display name of the cluster. + * + * @param name Display name of the cluster. + * @return the next definition stage. + */ + WithCreate withName(String name); + } + + /** + * The stage of the SCClusterRecord definition allowing to specify metadata. + */ + interface WithMetadata { + /** + * Specifies the metadata property: Metadata of the record. + * + * @param metadata Metadata of the record. + * @return the next definition stage. + */ + WithCreate withMetadata(SCMetadataEntity metadata); + } + + /** + * The stage of the SCClusterRecord definition allowing to specify spec. + */ + interface WithSpec { + /** + * Specifies the spec property: Specification of the cluster. + * + * @param spec Specification of the cluster. + * @return the next definition stage. + */ + WithCreate withSpec(SCClusterSpecEntity spec); + } + + /** + * The stage of the SCClusterRecord definition allowing to specify status. + */ + interface WithStatus { + /** + * Specifies the status property: Specification of the cluster status. + * + * @param status Specification of the cluster status. + * @return the next definition stage. + */ + WithCreate withStatus(ClusterStatusEntity status); + } + } + + /** + * Begins update for the SCClusterRecord resource. + * + * @return the stage of resource update. + */ + SCClusterRecord.Update update(); + + /** + * The template for SCClusterRecord update. + */ + interface Update extends UpdateStages.WithKind, UpdateStages.WithType, UpdateStages.WithName, + UpdateStages.WithMetadata, UpdateStages.WithSpec, UpdateStages.WithStatus { + /** + * Executes the update request. + * + * @return the updated resource. + */ + SCClusterRecord apply(); + + /** + * Executes the update request. + * + * @param context The context to associate with this operation. + * @return the updated resource. 
+ */ + SCClusterRecord apply(Context context); + } + + /** + * The SCClusterRecord update stages. + */ + interface UpdateStages { + /** + * The stage of the SCClusterRecord update allowing to specify kind. + */ + interface WithKind { + /** + * Specifies the kind property: Type of cluster. + * + * @param kind Type of cluster. + * @return the next definition stage. + */ + Update withKind(String kind); + } + + /** + * The stage of the SCClusterRecord update allowing to specify type. + */ + interface WithType { + /** + * Specifies the type property: Type of the resource. + * + * @param type Type of the resource. + * @return the next definition stage. + */ + Update withType(String type); + } + + /** + * The stage of the SCClusterRecord update allowing to specify name. + */ + interface WithName { + /** + * Specifies the name property: Display name of the cluster. + * + * @param name Display name of the cluster. + * @return the next definition stage. + */ + Update withName(String name); + } + + /** + * The stage of the SCClusterRecord update allowing to specify metadata. + */ + interface WithMetadata { + /** + * Specifies the metadata property: Metadata of the record. + * + * @param metadata Metadata of the record. + * @return the next definition stage. + */ + Update withMetadata(SCMetadataEntity metadata); + } + + /** + * The stage of the SCClusterRecord update allowing to specify spec. + */ + interface WithSpec { + /** + * Specifies the spec property: Specification of the cluster. + * + * @param spec Specification of the cluster. + * @return the next definition stage. + */ + Update withSpec(SCClusterSpecEntity spec); + } + + /** + * The stage of the SCClusterRecord update allowing to specify status. + */ + interface WithStatus { + /** + * Specifies the status property: Specification of the cluster status. + * + * @param status Specification of the cluster status. + * @return the next definition stage. + */ + Update withStatus(ClusterStatusEntity status); + } + } } diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterSpecEntity.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterSpecEntity.java index 8c789f7418ae..331907f9ab9b 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterSpecEntity.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/SCClusterSpecEntity.java @@ -36,6 +36,11 @@ public final class SCClusterSpecEntity implements JsonSerializable { + /* + * Stream governance configuration + */ + private Package packageProperty; + + /** + * Creates an instance of StreamGovernanceConfig class. + */ + public StreamGovernanceConfig() { + } + + /** + * Get the packageProperty property: Stream governance configuration. + * + * @return the packageProperty value. + */ + public Package packageProperty() { + return this.packageProperty; + } + + /** + * Set the packageProperty property: Stream governance configuration. + * + * @param packageProperty the packageProperty value to set. + * @return the StreamGovernanceConfig object itself. + */ + public StreamGovernanceConfig withPackageProperty(Package packageProperty) { + this.packageProperty = packageProperty; + return this; + } + + /** + * Validates the instance. + * + * @throws IllegalArgumentException thrown if the instance is not valid. 
+ */ + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("package", this.packageProperty == null ? null : this.packageProperty.toString()); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of StreamGovernanceConfig from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of StreamGovernanceConfig if the JsonReader was pointing to an instance of it, or null if it + * was pointing to JSON null. + * @throws IOException If an error occurs while reading the StreamGovernanceConfig. + */ + public static StreamGovernanceConfig fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + StreamGovernanceConfig deserializedStreamGovernanceConfig = new StreamGovernanceConfig(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("package".equals(fieldName)) { + deserializedStreamGovernanceConfig.packageProperty = Package.fromString(reader.getString()); + } else { + reader.skipChildren(); + } + } + + return deserializedStreamGovernanceConfig; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicMetadataEntity.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicMetadataEntity.java new file mode 100644 index 000000000000..4fb22c7b58e8 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicMetadataEntity.java @@ -0,0 +1,121 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.annotation.Fluent; +import com.azure.json.JsonReader; +import com.azure.json.JsonSerializable; +import com.azure.json.JsonToken; +import com.azure.json.JsonWriter; +import java.io.IOException; + +/** + * Metadata of the data record. + */ +@Fluent +public final class TopicMetadataEntity implements JsonSerializable { + /* + * Self lookup url + */ + private String self; + + /* + * Resource name of the record + */ + private String resourceName; + + /** + * Creates an instance of TopicMetadataEntity class. + */ + public TopicMetadataEntity() { + } + + /** + * Get the self property: Self lookup url. + * + * @return the self value. + */ + public String self() { + return this.self; + } + + /** + * Set the self property: Self lookup url. + * + * @param self the self value to set. + * @return the TopicMetadataEntity object itself. + */ + public TopicMetadataEntity withSelf(String self) { + this.self = self; + return this; + } + + /** + * Get the resourceName property: Resource name of the record. + * + * @return the resourceName value. + */ + public String resourceName() { + return this.resourceName; + } + + /** + * Set the resourceName property: Resource name of the record. + * + * @param resourceName the resourceName value to set. + * @return the TopicMetadataEntity object itself. + */ + public TopicMetadataEntity withResourceName(String resourceName) { + this.resourceName = resourceName; + return this; + } + + /** + * Validates the instance. 
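
A small sketch of the stream-governance model just shown. The Java accessor is `packageProperty` while the wire field is `package` (a Java keyword), exactly as the `toJson` above writes it:

```java
import com.azure.json.JsonProviders;
import com.azure.json.JsonWriter;
import com.azure.resourcemanager.confluent.models.Package;
import com.azure.resourcemanager.confluent.models.StreamGovernanceConfig;
import java.io.IOException;
import java.io.StringWriter;

public final class StreamGovernanceSample {
    public static void main(String[] args) throws IOException {
        // Pick the governance package via the fluent setter.
        StreamGovernanceConfig governance = new StreamGovernanceConfig()
            .withPackageProperty(Package.ESSENTIALS);

        // Serializing shows the "package" field name on the wire.
        StringWriter buffer = new StringWriter();
        try (JsonWriter writer = JsonProviders.createWriter(buffer)) {
            governance.toJson(writer).flush();
        }
        System.out.println(buffer); // {"package":"ESSENTIALS"}
    }
}
```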
+ * + * @throws IllegalArgumentException thrown if the instance is not valid. + */ + public void validate() { + } + + /** + * {@inheritDoc} + */ + @Override + public JsonWriter toJson(JsonWriter jsonWriter) throws IOException { + jsonWriter.writeStartObject(); + jsonWriter.writeStringField("self", this.self); + jsonWriter.writeStringField("resourceName", this.resourceName); + return jsonWriter.writeEndObject(); + } + + /** + * Reads an instance of TopicMetadataEntity from the JsonReader. + * + * @param jsonReader The JsonReader being read. + * @return An instance of TopicMetadataEntity if the JsonReader was pointing to an instance of it, or null if it was + * pointing to JSON null. + * @throws IOException If an error occurs while reading the TopicMetadataEntity. + */ + public static TopicMetadataEntity fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TopicMetadataEntity deserializedTopicMetadataEntity = new TopicMetadataEntity(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("self".equals(fieldName)) { + deserializedTopicMetadataEntity.self = reader.getString(); + } else if ("resourceName".equals(fieldName)) { + deserializedTopicMetadataEntity.resourceName = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTopicMetadataEntity; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicRecord.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicRecord.java new file mode 100644 index 000000000000..127c152d62c6 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicRecord.java @@ -0,0 +1,296 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.models; + +import com.azure.core.util.Context; +import com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner; +import java.util.List; + +/** + * An immutable client-side representation of TopicRecord. + */ +public interface TopicRecord { + /** + * Gets the id property: Fully qualified resource Id for the resource. + * + * @return the id value. + */ + String id(); + + /** + * Gets the name property: The name of the resource. + * + * @return the name value. + */ + String name(); + + /** + * Gets the type property: The type of the resource. + * + * @return the type value. + */ + String type(); + + /** + * Gets the kind property: Type of topic. + * + * @return the kind value. + */ + String kind(); + + /** + * Gets the topicId property: Topic Id returned by Confluent. + * + * @return the topicId value. + */ + String topicId(); + + /** + * Gets the metadata property: Metadata of the record. + * + * @return the metadata value. + */ + TopicMetadataEntity metadata(); + + /** + * Gets the partitions property: Partition Specification of the topic. + * + * @return the partitions value. + */ + TopicsRelatedLink partitions(); + + /** + * Gets the configs property: Config Specification of the topic. + * + * @return the configs value. + */ + TopicsRelatedLink configs(); + + /** + * Gets the inputConfigs property: Input Config Specification of the topic. + * + * @return the inputConfigs value. 
+ */ + List inputConfigs(); + + /** + * Gets the partitionsReassignments property: Partition Reassignment Specification of the topic. + * + * @return the partitionsReassignments value. + */ + TopicsRelatedLink partitionsReassignments(); + + /** + * Gets the partitionsCount property: Partition count of the topic. + * + * @return the partitionsCount value. + */ + String partitionsCount(); + + /** + * Gets the replicationFactor property: Replication factor of the topic. + * + * @return the replicationFactor value. + */ + String replicationFactor(); + + /** + * Gets the inner com.azure.resourcemanager.confluent.fluent.models.TopicRecordInner object. + * + * @return the inner object. + */ + TopicRecordInner innerModel(); + + /** + * The entirety of the TopicRecord definition. + */ + interface Definition + extends DefinitionStages.Blank, DefinitionStages.WithParentResource, DefinitionStages.WithCreate { + } + + /** + * The TopicRecord definition stages. + */ + interface DefinitionStages { + /** + * The first stage of the TopicRecord definition. + */ + interface Blank extends WithParentResource { + } + + /** + * The stage of the TopicRecord definition allowing to specify parent resource. + */ + interface WithParentResource { + /** + * Specifies resourceGroupName, organizationName, environmentId, clusterId. + * + * @param resourceGroupName The name of the resource group. The name is case insensitive. + * @param organizationName Organization resource name. + * @param environmentId Confluent environment id. + * @param clusterId Confluent kafka or schema registry cluster id. + * @return the next definition stage. + */ + WithCreate withExistingCluster(String resourceGroupName, String organizationName, String environmentId, + String clusterId); + } + + /** + * The stage of the TopicRecord definition which contains all the minimum required properties for the resource + * to be created, but also allows for any other optional properties to be specified. + */ + interface WithCreate extends DefinitionStages.WithKind, DefinitionStages.WithTopicId, + DefinitionStages.WithMetadata, DefinitionStages.WithPartitions, DefinitionStages.WithConfigs, + DefinitionStages.WithInputConfigs, DefinitionStages.WithPartitionsReassignments, + DefinitionStages.WithPartitionsCount, DefinitionStages.WithReplicationFactor { + /** + * Executes the create request. + * + * @return the created resource. + */ + TopicRecord create(); + + /** + * Executes the create request. + * + * @param context The context to associate with this operation. + * @return the created resource. + */ + TopicRecord create(Context context); + } + + /** + * The stage of the TopicRecord definition allowing to specify kind. + */ + interface WithKind { + /** + * Specifies the kind property: Type of topic. + * + * @param kind Type of topic. + * @return the next definition stage. + */ + WithCreate withKind(String kind); + } + + /** + * The stage of the TopicRecord definition allowing to specify topicId. + */ + interface WithTopicId { + /** + * Specifies the topicId property: Topic Id returned by Confluent. + * + * @param topicId Topic Id returned by Confluent. + * @return the next definition stage. + */ + WithCreate withTopicId(String topicId); + } + + /** + * The stage of the TopicRecord definition allowing to specify metadata. + */ + interface WithMetadata { + /** + * Specifies the metadata property: Metadata of the record. + * + * @param metadata Metadata of the record. + * @return the next definition stage. 
+             */
+            WithCreate withMetadata(TopicMetadataEntity metadata);
+        }
+
+        /**
+         * The stage of the TopicRecord definition allowing to specify partitions.
+         */
+        interface WithPartitions {
+            /**
+             * Specifies the partitions property: Partition Specification of the topic.
+             *
+             * @param partitions Partition Specification of the topic.
+             * @return the next definition stage.
+             */
+            WithCreate withPartitions(TopicsRelatedLink partitions);
+        }
+
+        /**
+         * The stage of the TopicRecord definition allowing to specify configs.
+         */
+        interface WithConfigs {
+            /**
+             * Specifies the configs property: Config Specification of the topic.
+             *
+             * @param configs Config Specification of the topic.
+             * @return the next definition stage.
+             */
+            WithCreate withConfigs(TopicsRelatedLink configs);
+        }
+
+        /**
+         * The stage of the TopicRecord definition allowing to specify inputConfigs.
+         */
+        interface WithInputConfigs {
+            /**
+             * Specifies the inputConfigs property: Input Config Specification of the topic.
+             *
+             * @param inputConfigs Input Config Specification of the topic.
+             * @return the next definition stage.
+             */
+            WithCreate withInputConfigs(List<TopicsInputConfig> inputConfigs);
+        }
+
+        /**
+         * The stage of the TopicRecord definition allowing to specify partitionsReassignments.
+         */
+        interface WithPartitionsReassignments {
+            /**
+             * Specifies the partitionsReassignments property: Partition Reassignment Specification of the topic.
+             *
+             * @param partitionsReassignments Partition Reassignment Specification of the topic.
+             * @return the next definition stage.
+             */
+            WithCreate withPartitionsReassignments(TopicsRelatedLink partitionsReassignments);
+        }
+
+        /**
+         * The stage of the TopicRecord definition allowing to specify partitionsCount.
+         */
+        interface WithPartitionsCount {
+            /**
+             * Specifies the partitionsCount property: Partition count of the topic.
+             *
+             * @param partitionsCount Partition count of the topic.
+             * @return the next definition stage.
+             */
+            WithCreate withPartitionsCount(String partitionsCount);
+        }
+
+        /**
+         * The stage of the TopicRecord definition allowing to specify replicationFactor.
+         */
+        interface WithReplicationFactor {
+            /**
+             * Specifies the replicationFactor property: Replication factor of the topic.
+             *
+             * @param replicationFactor Replication factor of the topic.
+             * @return the next definition stage.
+             */
+            WithCreate withReplicationFactor(String replicationFactor);
+        }
+    }
+
+    /**
+     * Refreshes the resource to sync with Azure.
+     *
+     * @return the refreshed resource.
+     */
+    TopicRecord refresh();
+
+    /**
+     * Refreshes the resource to sync with Azure.
+     *
+     * @param context The context to associate with this operation.
+     * @return the refreshed resource.
+     */
+    TopicRecord refresh(Context context);
+}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Topics.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Topics.java
new file mode 100644
index 000000000000..2441adda1a5d
--- /dev/null
+++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/Topics.java
@@ -0,0 +1,165 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.resourcemanager.confluent.models;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.core.http.rest.Response;
+import com.azure.core.util.Context;
+
+/**
+ * Resource collection API of Topics.
+ */
+public interface Topics {
+    /**
+     * Lists all the topics in a cluster.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return result of GET request to list topics in the cluster of a confluent organization as paginated response
+     * with {@link PagedIterable}.
+     */
+    PagedIterable<TopicRecord> list(String resourceGroupName, String organizationName, String environmentId,
+        String clusterId);
+
+    /**
+     * Lists all the topics in a cluster.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param pageSize Pagination size.
+     * @param pageToken An opaque pagination token to fetch the next set of records.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return result of GET request to list topics in the cluster of a confluent organization as paginated response
+     * with {@link PagedIterable}.
+     */
+    PagedIterable<TopicRecord> list(String resourceGroupName, String organizationName, String environmentId,
+        String clusterId, Integer pageSize, String pageToken, Context context);
+
+    /**
+     * Get confluent topic by Name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param topicName Confluent kafka or schema registry topic name.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent topic by Name along with {@link Response}.
+     */
+    Response<TopicRecord> getWithResponse(String resourceGroupName, String organizationName, String environmentId,
+        String clusterId, String topicName, Context context);
+
+    /**
+     * Get confluent topic by Name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param topicName Confluent kafka or schema registry topic name.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent topic by Name.
+     */
+    TopicRecord get(String resourceGroupName, String organizationName, String environmentId, String clusterId,
+        String topicName);
+
+    /**
+     * Delete confluent topic by name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param topicName Confluent kafka or schema registry topic name.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     */
+    void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId,
+        String topicName);
+
+    /**
+     * Delete confluent topic by name.
+     *
+     * @param resourceGroupName The name of the resource group. The name is case insensitive.
+     * @param organizationName Organization resource name.
+     * @param environmentId Confluent environment id.
+     * @param clusterId Confluent kafka or schema registry cluster id.
+     * @param topicName Confluent kafka or schema registry topic name.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     */
+    void delete(String resourceGroupName, String organizationName, String environmentId, String clusterId,
+        String topicName, Context context);
+
+    /**
+     * Get confluent topic by Name.
+     *
+     * @param id the resource ID.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent topic by Name along with {@link Response}.
+     */
+    TopicRecord getById(String id);
+
+    /**
+     * Get confluent topic by Name.
+     *
+     * @param id the resource ID.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     * @return confluent topic by Name along with {@link Response}.
+     */
+    Response<TopicRecord> getByIdWithResponse(String id, Context context);
+
+    /**
+     * Delete confluent topic by name.
+     *
+     * @param id the resource ID.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     */
+    void deleteById(String id);
+
+    /**
+     * Delete confluent topic by name.
+     *
+     * @param id the resource ID.
+     * @param context The context to associate with this operation.
+     * @throws IllegalArgumentException thrown if parameters fail the validation.
+     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
+     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+     */
+    void deleteByIdWithResponse(String id, Context context);
+
+    /**
+     * Begins definition for a new TopicRecord resource.
+     *
+     * @param name resource name.
+     * @return the first stage of the new TopicRecord definition.
+     */
+    TopicRecord.DefinitionStages.Blank define(String name);
+}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicsInputConfig.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicsInputConfig.java
new file mode 100644
index 000000000000..58cbbe226854
--- /dev/null
+++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicsInputConfig.java
@@ -0,0 +1,121 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.resourcemanager.confluent.models;
+
+import com.azure.core.annotation.Fluent;
+import com.azure.json.JsonReader;
+import com.azure.json.JsonSerializable;
+import com.azure.json.JsonToken;
+import com.azure.json.JsonWriter;
+import java.io.IOException;
+
+/**
+ * Topics input config.
+ */
+@Fluent
+public final class TopicsInputConfig implements JsonSerializable<TopicsInputConfig> {
+    /*
+     * Name of the topic input config
+     */
+    private String name;
+
+    /*
+     * Value of the topic input config
+     */
+    private String value;
+
+    /**
+     * Creates an instance of TopicsInputConfig class.
+     */
+    public TopicsInputConfig() {
+    }
+
+    /**
+     * Get the name property: Name of the topic input config.
+     *
+     * @return the name value.
+     */
+    public String name() {
+        return this.name;
+    }
+
+    /**
+     * Set the name property: Name of the topic input config.
+     *
+     * @param name the name value to set.
+     * @return the TopicsInputConfig object itself.
+     */
+    public TopicsInputConfig withName(String name) {
+        this.name = name;
+        return this;
+    }
+
+    /**
+     * Get the value property: Value of the topic input config.
+     *
+     * @return the value value.
+     */
+    public String value() {
+        return this.value;
+    }
+
+    /**
+     * Set the value property: Value of the topic input config.
+     *
+     * @param value the value value to set.
+     * @return the TopicsInputConfig object itself.
+     */
+    public TopicsInputConfig withValue(String value) {
+        this.value = value;
+        return this;
+    }
+
+    /**
+     * Validates the instance.
+     *
+     * @throws IllegalArgumentException thrown if the instance is not valid.
+     */
+    public void validate() {
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public JsonWriter toJson(JsonWriter jsonWriter) throws IOException {
+        jsonWriter.writeStartObject();
+        jsonWriter.writeStringField("name", this.name);
+        jsonWriter.writeStringField("value", this.value);
+        return jsonWriter.writeEndObject();
+    }
+
+    /**
+     * Reads an instance of TopicsInputConfig from the JsonReader.
+     *
+     * @param jsonReader The JsonReader being read.
+     * @return An instance of TopicsInputConfig if the JsonReader was pointing to an instance of it, or null if it was
+     * pointing to JSON null.
+     * @throws IOException If an error occurs while reading the TopicsInputConfig.
+     */
+    public static TopicsInputConfig fromJson(JsonReader jsonReader) throws IOException {
+        return jsonReader.readObject(reader -> {
+            TopicsInputConfig deserializedTopicsInputConfig = new TopicsInputConfig();
+            while (reader.nextToken() != JsonToken.END_OBJECT) {
+                String fieldName = reader.getFieldName();
+                reader.nextToken();
+
+                if ("name".equals(fieldName)) {
+                    deserializedTopicsInputConfig.name = reader.getString();
+                } else if ("value".equals(fieldName)) {
+                    deserializedTopicsInputConfig.value = reader.getString();
+                } else {
+                    reader.skipChildren();
+                }
+            }
+
+            return deserializedTopicsInputConfig;
+        });
+    }
+}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicsRelatedLink.java b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicsRelatedLink.java
new file mode 100644
index 000000000000..f9a962927c6a
--- /dev/null
+++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/java/com/azure/resourcemanager/confluent/models/TopicsRelatedLink.java
@@ -0,0 +1,93 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+// Code generated by Microsoft (R) AutoRest Code Generator.
+
+package com.azure.resourcemanager.confluent.models;
+
+import com.azure.core.annotation.Fluent;
+import com.azure.json.JsonReader;
+import com.azure.json.JsonSerializable;
+import com.azure.json.JsonToken;
+import com.azure.json.JsonWriter;
+import java.io.IOException;
+
+/**
+ * Partition Config spec of the topic record.
+ */
+@Fluent
+public final class TopicsRelatedLink implements JsonSerializable<TopicsRelatedLink> {
+    /*
+     * Relationship of the topic
+     */
+    private String related;
+
+    /**
+     * Creates an instance of TopicsRelatedLink class.
+     */
+    public TopicsRelatedLink() {
+    }
+
+    /**
+     * Get the related property: Relationship of the topic.
+     *
+     * @return the related value.
+     */
+    public String related() {
+        return this.related;
+    }
+
+    /**
+     * Set the related property: Relationship of the topic.
+     *
+     * @param related the related value to set.
+     * @return the TopicsRelatedLink object itself.
+     */
+    public TopicsRelatedLink withRelated(String related) {
+        this.related = related;
+        return this;
+    }
+
+    /**
+     * Validates the instance.
+     *
+     * @throws IllegalArgumentException thrown if the instance is not valid.
+     */
+    public void validate() {
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public JsonWriter toJson(JsonWriter jsonWriter) throws IOException {
+        jsonWriter.writeStartObject();
+        jsonWriter.writeStringField("related", this.related);
+        return jsonWriter.writeEndObject();
+    }
+
+    /**
+     * Reads an instance of TopicsRelatedLink from the JsonReader.
+     *
+     * @param jsonReader The JsonReader being read.
+     * @return An instance of TopicsRelatedLink if the JsonReader was pointing to an instance of it, or null if it was
+     * pointing to JSON null.
+     * @throws IOException If an error occurs while reading the TopicsRelatedLink.
+ */ + public static TopicsRelatedLink fromJson(JsonReader jsonReader) throws IOException { + return jsonReader.readObject(reader -> { + TopicsRelatedLink deserializedTopicsRelatedLink = new TopicsRelatedLink(); + while (reader.nextToken() != JsonToken.END_OBJECT) { + String fieldName = reader.getFieldName(); + reader.nextToken(); + + if ("related".equals(fieldName)) { + deserializedTopicsRelatedLink.related = reader.getString(); + } else { + reader.skipChildren(); + } + } + + return deserializedTopicsRelatedLink; + }); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-confluent/proxy-config.json b/sdk/confluent/azure-resourcemanager-confluent/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-confluent/proxy-config.json index 3e4e90690b77..bacd552be058 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-confluent/proxy-config.json +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/resources/META-INF/native-image/com.azure.resourcemanager/azure-resourcemanager-confluent/proxy-config.json @@ -1 +1 @@ -[["com.azure.resourcemanager.confluent.implementation.AccessClientImpl$AccessService"],["com.azure.resourcemanager.confluent.implementation.MarketplaceAgreementsClientImpl$MarketplaceAgreementsService"],["com.azure.resourcemanager.confluent.implementation.OrganizationOperationsClientImpl$OrganizationOperationsService"],["com.azure.resourcemanager.confluent.implementation.OrganizationsClientImpl$OrganizationsService"],["com.azure.resourcemanager.confluent.implementation.ValidationsClientImpl$ValidationsService"]] \ No newline at end of file +[["com.azure.resourcemanager.confluent.implementation.AccessClientImpl$AccessService"],["com.azure.resourcemanager.confluent.implementation.ClustersClientImpl$ClustersService"],["com.azure.resourcemanager.confluent.implementation.ConnectorsClientImpl$ConnectorsService"],["com.azure.resourcemanager.confluent.implementation.EnvironmentsClientImpl$EnvironmentsService"],["com.azure.resourcemanager.confluent.implementation.MarketplaceAgreementsClientImpl$MarketplaceAgreementsService"],["com.azure.resourcemanager.confluent.implementation.OrganizationOperationsClientImpl$OrganizationOperationsService"],["com.azure.resourcemanager.confluent.implementation.OrganizationsClientImpl$OrganizationsService"],["com.azure.resourcemanager.confluent.implementation.TopicsClientImpl$TopicsService"],["com.azure.resourcemanager.confluent.implementation.ValidationsClientImpl$ValidationsService"]] \ No newline at end of file diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/main/resources/azure-resourcemanager-confluent.properties b/sdk/confluent/azure-resourcemanager-confluent/src/main/resources/azure-resourcemanager-confluent.properties new file mode 100644 index 000000000000..defbd48204e4 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/main/resources/azure-resourcemanager-confluent.properties @@ -0,0 +1 @@ +version=${project.version} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingSamples.java index 124a79b04912..e37053dde2fd 100644 --- 
a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingSamples.java @@ -12,7 +12,7 @@ public final class AccessCreateRoleBindingSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_CreateRoleBinding. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_CreateRoleBinding. * json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessDeleteRoleBindingSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessDeleteRoleBindingSamples.java index 97c723ddc5e2..8226f0a875d0 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessDeleteRoleBindingSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessDeleteRoleBindingSamples.java @@ -10,7 +10,7 @@ public final class AccessDeleteRoleBindingSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_DeleteRoleBinding. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_DeleteRoleBinding. * json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserSamples.java index 91f23f9fbf41..b1f908b28939 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserSamples.java @@ -13,7 +13,7 @@ public final class AccessInviteUserSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_InviteUser.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_InviteUser.json */ /** * Sample code: Access_InviteUser. 
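
Every generated sample in this package is a static method that takes a ConfluentManager. The following is a minimal, hand-written sketch of how such a manager is typically built before invoking the samples; it assumes azure-identity is on the classpath and that AZURE_SUBSCRIPTION_ID and AZURE_TENANT_ID are set in the environment, and the class name is illustrative only.

import com.azure.core.credential.TokenCredential;
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;
import com.azure.identity.DefaultAzureCredentialBuilder;
import com.azure.resourcemanager.confluent.ConfluentManager;

public final class ManagerBootstrapSketch {
    public static ConfluentManager buildManager() {
        // Subscription and tenant are resolved from the environment by AzureProfile.
        AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        // DefaultAzureCredential walks the usual credential chain (environment, managed identity, CLI, ...).
        TokenCredential credential = new DefaultAzureCredentialBuilder().build();
        return ConfluentManager.authenticate(credential, profile);
    }
}

The resulting manager exposes the collections exercised by the samples in this patch, for example manager.topics(), manager.clusters(), manager.environments() and manager.connectors().
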
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListClustersSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListClustersSamples.java index 01b3156b1e57..716be4971b75 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListClustersSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListClustersSamples.java @@ -14,7 +14,7 @@ public final class AccessListClustersSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_ClusterList.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_ClusterList.json */ /** * Sample code: Access_ClusterList. diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsSamples.java index 6fb16bfd2248..c72328fbd750 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsSamples.java @@ -14,7 +14,7 @@ public final class AccessListEnvironmentsSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_EnvironmentList. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_EnvironmentList. * json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsSamples.java index 65df73bd6fd2..633c8ab45b75 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsSamples.java @@ -14,7 +14,7 @@ public final class AccessListInvitationsSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_InvitationsList. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_InvitationsList. 
* json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingNameListSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingNameListSamples.java index b235496e934a..89ab18cc972b 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingNameListSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingNameListSamples.java @@ -13,7 +13,7 @@ */ public final class AccessListRoleBindingNameListSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Access_RoleBindingNameList.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsSamples.java index 396cbeeb868c..c59fb463d527 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsSamples.java @@ -14,7 +14,7 @@ public final class AccessListRoleBindingsSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_RoleBindingList. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_RoleBindingList. 
* json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsSamples.java index bb49ccc6042f..8ec5b3a00132 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsSamples.java @@ -13,7 +13,7 @@ */ public final class AccessListServiceAccountsSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Access_ServiceAccountsList.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListUsersSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListUsersSamples.java index a40203e1678f..feb835208cbd 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListUsersSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/AccessListUsersSamples.java @@ -14,7 +14,7 @@ public final class AccessListUsersSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Access_UsersList.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Access_UsersList.json */ /** * Sample code: Access_UsersList. diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ClusterCreateOrUpdateSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ClusterCreateOrUpdateSamples.java new file mode 100644 index 000000000000..b7326c6c4897 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ClusterCreateOrUpdateSamples.java @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +import com.azure.resourcemanager.confluent.models.Package; +import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity; +import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity; + +/** + * Samples for Cluster CreateOrUpdate. + */ +public final class ClusterCreateOrUpdateSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Cluster_Create.json + */ + /** + * Sample code: Cluster_CreateOrUpdate. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void clusterCreateOrUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.clusters() + .define("cluster-1") + .withExistingEnvironment("myResourceGroup", "myOrganization", "env-1") + .withSpec(new SCClusterSpecEntity().withPackageProperty(Package.ESSENTIALS) + .withRegion("us-east4") + .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("env-1"))) + .create(); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ClusterDeleteSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ClusterDeleteSamples.java new file mode 100644 index 000000000000..a6b6cf60afc9 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ClusterDeleteSamples.java @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Cluster Delete. + */ +public final class ClusterDeleteSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Cluster_Delete.json + */ + /** + * Sample code: Cluster_Delete. + * + * @param manager Entry point to ConfluentManager. + */ + public static void clusterDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.clusters() + .delete("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorCreateOrUpdateSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorCreateOrUpdateSamples.java new file mode 100644 index 000000000000..afa49c725ffb --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorCreateOrUpdateSamples.java @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +import com.azure.resourcemanager.confluent.models.AuthType; +import com.azure.resourcemanager.confluent.models.AzureBlobStorageSinkConnectorServiceInfo; +import com.azure.resourcemanager.confluent.models.ConnectorClass; +import com.azure.resourcemanager.confluent.models.ConnectorInfoBase; +import com.azure.resourcemanager.confluent.models.ConnectorType; +import com.azure.resourcemanager.confluent.models.DataFormatType; +import com.azure.resourcemanager.confluent.models.KafkaAzureBlobStorageSinkConnectorInfo; +import java.util.Arrays; + +/** + * Samples for Connector CreateOrUpdate. + */ +public final class ConnectorCreateOrUpdateSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_CreateConnectorByName.json + */ + /** + * Sample code: Connector_CreateOrUpdate. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void connectorCreateOrUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .define("connector-1") + .withExistingCluster("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de") + .withConnectorBasicInfo(new ConnectorInfoBase().withConnectorType(ConnectorType.SINK) + .withConnectorClass(ConnectorClass.fromString("AZUREBLOBSTORAGESINK")) + .withConnectorName("connector-1")) + .withConnectorServiceTypeInfo( + new AzureBlobStorageSinkConnectorServiceInfo().withStorageAccountName("stcfaccount-1") + .withStorageAccountKey("fakeTokenPlaceholder") + .withStorageContainerName("continer-1")) + .withPartnerConnectorInfo(new KafkaAzureBlobStorageSinkConnectorInfo().withAuthType(AuthType.KAFKA_API_KEY) + .withInputFormat(DataFormatType.JSON) + .withOutputFormat(DataFormatType.JSON) + .withApiKey("fakeTokenPlaceholder") + .withApiSecret("fakeTokenPlaceholder") + .withTopics(Arrays.asList("topic-1")) + .withTopicsDir("topicsDir") + .withFlushSize("1000") + .withMaxTasks("2") + .withTimeInterval("DAILY")) + .create(); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorDeleteSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorDeleteSamples.java new file mode 100644 index 000000000000..2533c3b018d5 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorDeleteSamples.java @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Connector Delete. + */ +public final class ConnectorDeleteSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_DeleteConnectorByName.json + */ + /** + * Sample code: Connector_Delete. + * + * @param manager Entry point to ConfluentManager. + */ + public static void connectorDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .delete("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "connector-1", + com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorGetSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorGetSamples.java new file mode 100644 index 000000000000..a463c7fc32f7 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorGetSamples.java @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Connector Get. + */ +public final class ConnectorGetSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_GetConnectorByName.json + */ + /** + * Sample code: Connector_Get. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void connectorGet(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .getWithResponse("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "connector-1", + com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorListSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorListSamples.java new file mode 100644 index 000000000000..ddc10076e58c --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ConnectorListSamples.java @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Connector List. + */ +public final class ConnectorListSamples { + /* + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ + * Organization_ConnectorList.json + */ + /** + * Sample code: Connector_List. + * + * @param manager Entry point to ConfluentManager. + */ + public static void connectorList(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.connectors() + .list("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", 10, null, + com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/EnvironmentCreateOrUpdateSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/EnvironmentCreateOrUpdateSamples.java new file mode 100644 index 000000000000..90a4fbbfd9e3 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/EnvironmentCreateOrUpdateSamples.java @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +import com.azure.resourcemanager.confluent.models.Package; +import com.azure.resourcemanager.confluent.models.StreamGovernanceConfig; + +/** + * Samples for Environment CreateOrUpdate. + */ +public final class EnvironmentCreateOrUpdateSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Environment_Create.json + */ + /** + * Sample code: Environment_CreateOrUpdate. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void environmentCreateOrUpdate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.environments() + .define("env-1") + .withExistingOrganization("myResourceGroup", "myOrganization") + .withStreamGovernanceConfig(new StreamGovernanceConfig().withPackageProperty(Package.ESSENTIALS)) + .create(); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/EnvironmentDeleteSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/EnvironmentDeleteSamples.java new file mode 100644 index 000000000000..4d67fde9c33f --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/EnvironmentDeleteSamples.java @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Environment Delete. + */ +public final class EnvironmentDeleteSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Environment_Delete.json + */ + /** + * Sample code: Environment_Delete. + * + * @param manager Entry point to ConfluentManager. + */ + public static void environmentDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.environments() + .delete("myResourceGroup", "myOrganization", "env-12132", com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsCreateSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsCreateSamples.java index 17406021a0cc..38e116ed697f 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsCreateSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsCreateSamples.java @@ -9,7 +9,7 @@ */ public final class MarketplaceAgreementsCreateSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * MarketplaceAgreements_Create.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsListSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsListSamples.java index bd445943d0bc..57cdc6ff8226 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsListSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsListSamples.java @@ -9,7 +9,7 @@ */ public final class MarketplaceAgreementsListSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: 
specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * MarketplaceAgreements_List.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateApiKeySamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateApiKeySamples.java index 428b75e277f7..48903ac47711 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateApiKeySamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateApiKeySamples.java @@ -11,15 +11,15 @@ */ public final class OrganizationCreateApiKeySamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_CreateClusterAPIKey.json */ /** - * Sample code: Organization_CreateAPIKey. + * Sample code: Organization_CreateClusterAPIKey. * * @param manager Entry point to ConfluentManager. */ - public static void organizationCreateAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { + public static void organizationCreateClusterAPIKey(com.azure.resourcemanager.confluent.ConfluentManager manager) { manager.organizations() .createApiKeyWithResponse("myResourceGroup", "myOrganization", "env-12132", "clusterId-123", new CreateApiKeyModel().withName("CI kafka access key") diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateSamples.java index 1e5159a5b5b9..1a4911a7c3a8 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationCreateSamples.java @@ -17,7 +17,7 @@ public final class OrganizationCreateSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Create.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Create.json */ /** * Sample code: Organization_Create. 
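
As a complement to the Environment_CreateOrUpdate and Cluster_CreateOrUpdate samples added earlier in this patch, here is a hand-written sketch (not a generated sample) that chains the two new fluent definition flows, creating an environment and then a Kafka cluster inside it; the resource names and region are illustrative.

import com.azure.resourcemanager.confluent.ConfluentManager;
import com.azure.resourcemanager.confluent.models.Package;
import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity;
import com.azure.resourcemanager.confluent.models.SCClusterRecord;
import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity;
import com.azure.resourcemanager.confluent.models.SCEnvironmentRecord;
import com.azure.resourcemanager.confluent.models.StreamGovernanceConfig;

public final class EnvironmentAndClusterSketch {
    public static void createEnvironmentAndCluster(ConfluentManager manager) {
        // Create (or update) a Confluent environment under an existing organization.
        SCEnvironmentRecord environment = manager.environments()
            .define("env-1")
            .withExistingOrganization("myResourceGroup", "myOrganization")
            .withStreamGovernanceConfig(new StreamGovernanceConfig().withPackageProperty(Package.ESSENTIALS))
            .create();
        System.out.println("environment: " + environment.id());

        // Create a Kafka cluster inside that environment, mirroring Cluster_CreateOrUpdate.
        SCClusterRecord cluster = manager.clusters()
            .define("cluster-1")
            .withExistingEnvironment("myResourceGroup", "myOrganization", "env-1")
            .withSpec(new SCClusterSpecEntity().withPackageProperty(Package.ESSENTIALS)
                .withRegion("us-east4")
                .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("env-1")))
            .create();
        System.out.println("cluster: " + cluster.id());
    }
}
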
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteClusterApiKeySamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteClusterApiKeySamples.java index 1f74af37e523..e6b329e8298d 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteClusterApiKeySamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteClusterApiKeySamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationDeleteClusterApiKeySamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_DeleteClusterAPIKey.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteSamples.java index d320cb003005..ea73cf6204a7 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationDeleteSamples.java @@ -10,7 +10,7 @@ public final class OrganizationDeleteSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Delete.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Delete.json */ /** * Sample code: Confluent_Delete. diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetByResourceGroupSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetByResourceGroupSamples.java index 3dcfffb7ee9d..4364382cda35 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetByResourceGroupSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetByResourceGroupSamples.java @@ -10,7 +10,7 @@ public final class OrganizationGetByResourceGroupSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Get.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Get.json */ /** * Sample code: Organization_Get. 
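
The new Topics collection API can be combined in the same way. The sketch below (again hand-written, not generated) lists the topics of an existing cluster and then creates one through the fluent definition flow, mirroring the Topics_Create sample later in this patch; all resource names are illustrative.

import com.azure.resourcemanager.confluent.ConfluentManager;
import com.azure.resourcemanager.confluent.models.TopicRecord;
import com.azure.resourcemanager.confluent.models.TopicsInputConfig;
import java.util.Arrays;

public final class TopicsUsageSketch {
    public static void listAndCreateTopics(ConfluentManager manager) {
        // Page through the topics of an existing Kafka cluster.
        for (TopicRecord topic : manager.topics()
            .list("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de")) {
            System.out.println(topic.name() + " partitions=" + topic.partitionsCount());
        }

        // Create a compacted topic with one partition and a replication factor of three.
        TopicRecord created = manager.topics()
            .define("topic-1")
            .withExistingCluster("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de")
            .withInputConfigs(Arrays.asList(new TopicsInputConfig().withName("cleanup.policy").withValue("compact")))
            .withPartitionsCount("1")
            .withReplicationFactor("3")
            .create();
        System.out.println("created " + created.name());
    }
}
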
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterApiKeySamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterApiKeySamples.java index dfda35ff7080..173d6598cc82 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterApiKeySamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterApiKeySamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationGetClusterApiKeySamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_GetClusterAPIKey.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterByIdSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterByIdSamples.java index 8419b2b2ec5c..e0588e4dff3b 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterByIdSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetClusterByIdSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationGetClusterByIdSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_GetClusterById.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetEnvironmentByIdSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetEnvironmentByIdSamples.java index 866d375c81fb..880bd0b815e9 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetEnvironmentByIdSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetEnvironmentByIdSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationGetEnvironmentByIdSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_GetEnvironmentById.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetSchemaRegistryClusterByIdSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetSchemaRegistryClusterByIdSamples.java index a6bd52f510ed..86e81f7739e6 100644 --- 
a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetSchemaRegistryClusterByIdSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationGetSchemaRegistryClusterByIdSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationGetSchemaRegistryClusterByIdSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_GetSchemaRegistryClusterById.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListByResourceGroupSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListByResourceGroupSamples.java index 1ac26a5e67b3..da844263c64e 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListByResourceGroupSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListByResourceGroupSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationListByResourceGroupSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_ListByResourceGroup.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListClustersSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListClustersSamples.java index b5be4a2a001e..9ba70e02dac5 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListClustersSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListClustersSamples.java @@ -10,7 +10,7 @@ public final class OrganizationListClustersSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_ClusterList. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_ClusterList. 
* json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListEnvironmentsSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListEnvironmentsSamples.java index 4e8c52787bfc..7c88787899d2 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListEnvironmentsSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListEnvironmentsSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationListEnvironmentsSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_EnvironmentList.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListRegionsSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListRegionsSamples.java index 5eb13d1e8ecc..b0a2cb88467a 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListRegionsSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListRegionsSamples.java @@ -14,7 +14,7 @@ public final class OrganizationListRegionsSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_ListRegions. + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_ListRegions. 
* json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSamples.java index a4c58397030b..bc9fd7dac36e 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationListSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_ListBySubscription.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSchemaRegistryClustersSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSchemaRegistryClustersSamples.java index d68b89d9732f..6a6a043ec954 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSchemaRegistryClustersSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationListSchemaRegistryClustersSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationListSchemaRegistryClustersSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Organization_ListSchemaRegistryClusters.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationOperationsListSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationOperationsListSamples.java index f7e9b3da5c2c..73ea255cbd47 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationOperationsListSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationOperationsListSamples.java @@ -9,7 +9,7 @@ */ public final class OrganizationOperationsListSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * OrganizationOperations_List.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationUpdateSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationUpdateSamples.java index 704d85e155f3..a8c1c67c0394 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationUpdateSamples.java +++ 
b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/OrganizationUpdateSamples.java @@ -14,7 +14,7 @@ public final class OrganizationUpdateSamples { /* * x-ms-original-file: - * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/Organization_Update.json + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_Update.json */ /** * Sample code: Confluent_Update. diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsCreateSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsCreateSamples.java new file mode 100644 index 000000000000..617818432c4c --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsCreateSamples.java @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +import com.azure.resourcemanager.confluent.models.TopicsInputConfig; +import java.util.Arrays; + +/** + * Samples for Topics Create. + */ +public final class TopicsCreateSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Topics_Create.json + */ + /** + * Sample code: Topics_Create. + * + * @param manager Entry point to ConfluentManager. + */ + public static void topicsCreate(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .define("topic-1") + .withExistingCluster("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de") + .withInputConfigs(Arrays.asList(new TopicsInputConfig().withName("cleanup.policy").withValue("compact"), + new TopicsInputConfig().withName("retention.ms").withValue("86400000"))) + .withPartitionsCount("1") + .withReplicationFactor("3") + .create(); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsDeleteSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsDeleteSamples.java new file mode 100644 index 000000000000..b45b957ec74b --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsDeleteSamples.java @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Topics Delete. + */ +public final class TopicsDeleteSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Topics_Delete.json + */ + /** + * Sample code: Topics_Delete. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void topicsDelete(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .delete("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "topic-1", + com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsGetSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsGetSamples.java new file mode 100644 index 000000000000..df81e0da1ef6 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsGetSamples.java @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Topics Get. + */ +public final class TopicsGetSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Topics_Get.json + */ + /** + * Sample code: Topics_Get. + * + * @param manager Entry point to ConfluentManager. + */ + public static void topicsGet(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .getWithResponse("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", "topic-1", + com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsListSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsListSamples.java new file mode 100644 index 000000000000..6e037f81d903 --- /dev/null +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/TopicsListSamples.java @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +// Code generated by Microsoft (R) AutoRest Code Generator. + +package com.azure.resourcemanager.confluent.generated; + +/** + * Samples for Topics List. + */ +public final class TopicsListSamples { + /* + * x-ms-original-file: + * specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/Organization_TopicList. + * json + */ + /** + * Sample code: Organization_ListTopics. + * + * @param manager Entry point to ConfluentManager. 
+ */ + public static void organizationListTopics(com.azure.resourcemanager.confluent.ConfluentManager manager) { + manager.topics() + .list("myResourceGroup", "myOrganization", "env-12132", "dlz-f3a90de", 10, null, + com.azure.core.util.Context.NONE); + } +} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationSamples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationSamples.java index 3a0f7aea8a7d..57cf42403dca 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationSamples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationSamples.java @@ -16,7 +16,7 @@ */ public final class ValidationsValidateOrganizationSamples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Validations_ValidateOrganizations.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationV2Samples.java b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationV2Samples.java index 062a485e1a7f..f1d7d0f935f1 100644 --- a/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationV2Samples.java +++ b/sdk/confluent/azure-resourcemanager-confluent/src/samples/java/com/azure/resourcemanager/confluent/generated/ValidationsValidateOrganizationV2Samples.java @@ -16,7 +16,7 @@ */ public final class ValidationsValidateOrganizationV2Samples { /* - * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-02-13/examples/ + * x-ms-original-file: specification/confluent/resource-manager/Microsoft.Confluent/stable/2024-07-01/examples/ * Validations_ValidateOrganizationsV2.json */ /** diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingRequestModelTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingRequestModelTests.java deleted file mode 100644 index 5283c5ed3a1c..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingRequestModelTests.java +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.AccessCreateRoleBindingRequestModel; -import org.junit.jupiter.api.Assertions; - -public final class AccessCreateRoleBindingRequestModelTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessCreateRoleBindingRequestModel model = BinaryData - .fromString("{\"principal\":\"i\",\"role_name\":\"byuqerpqlp\",\"crn_pattern\":\"cciuqgbdbutau\"}") - .toObject(AccessCreateRoleBindingRequestModel.class); - Assertions.assertEquals("i", model.principal()); - Assertions.assertEquals("byuqerpqlp", model.roleName()); - Assertions.assertEquals("cciuqgbdbutau", model.crnPattern()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessCreateRoleBindingRequestModel model = new AccessCreateRoleBindingRequestModel().withPrincipal("i") - .withRoleName("byuqerpqlp") - .withCrnPattern("cciuqgbdbutau"); - model = BinaryData.fromObject(model).toObject(AccessCreateRoleBindingRequestModel.class); - Assertions.assertEquals("i", model.principal()); - Assertions.assertEquals("byuqerpqlp", model.roleName()); - Assertions.assertEquals("cciuqgbdbutau", model.crnPattern()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingWithResponseMockTests.java deleted file mode 100644 index fb260b6e1ebc..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessCreateRoleBindingWithResponseMockTests.java +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessCreateRoleBindingRequestModel; -import com.azure.resourcemanager.confluent.models.RoleBindingRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessCreateRoleBindingWithResponseMockTests { - @Test - public void testCreateRoleBindingWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"njlx\",\"id\":\"rhwpus\",\"metadata\":{\"self\":\"aqehg\",\"resource_name\":\"ohzjqatucoigeb\",\"created_at\":\"cnwfepbnwgfmxjg\",\"updated_at\":\"bjb\",\"deleted_at\":\"lfgtdysnaquflqbc\"},\"principal\":\"hamzjrwdkqze\",\"role_name\":\"jleziunjx\",\"crn_pattern\":\"zantkwceg\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - RoleBindingRecord response = manager.access() - .createRoleBindingWithResponse("gepuslvyjtc", "uwkasiz", - new AccessCreateRoleBindingRequestModel().withPrincipal("sfuughtuqfecjx") - .withRoleName("gtuhxuicbu") - .withCrnPattern("mr"), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("njlx", response.kind()); - Assertions.assertEquals("rhwpus", response.id()); - Assertions.assertEquals("aqehg", response.metadata().self()); - Assertions.assertEquals("ohzjqatucoigeb", response.metadata().resourceName()); - Assertions.assertEquals("cnwfepbnwgfmxjg", response.metadata().createdAt()); - Assertions.assertEquals("bjb", response.metadata().updatedAt()); - Assertions.assertEquals("lfgtdysnaquflqbc", response.metadata().deletedAt()); - Assertions.assertEquals("hamzjrwdkqze", response.principal()); - Assertions.assertEquals("jleziunjx", response.roleName()); - Assertions.assertEquals("zantkwceg", response.crnPattern()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessDeleteRoleBindingWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessDeleteRoleBindingWithResponseMockTests.java deleted file mode 100644 index 8e82de75396e..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessDeleteRoleBindingWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessDeleteRoleBindingWithResponseMockTests { - @Test - public void testDeleteRoleBindingWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.access() - .deleteRoleBindingWithResponse("amlbnseqacjjvpil", "uooqjagmdit", "ueio", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserAccountModelTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserAccountModelTests.java deleted file mode 100644 index 6bbffe5f560a..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserAccountModelTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.AccessInviteUserAccountModel; -import com.azure.resourcemanager.confluent.models.AccessInvitedUserDetails; -import org.junit.jupiter.api.Assertions; - -public final class AccessInviteUserAccountModelTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessInviteUserAccountModel model = BinaryData.fromString( - "{\"organizationId\":\"buxwgip\",\"email\":\"onowk\",\"upn\":\"hwankixzbinjepu\",\"invitedUserDetails\":{\"invitedEmail\":\"rywn\",\"auth_type\":\"oqftiyqzrnkcq\"}}") - .toObject(AccessInviteUserAccountModel.class); - Assertions.assertEquals("buxwgip", model.organizationId()); - Assertions.assertEquals("onowk", model.email()); - Assertions.assertEquals("hwankixzbinjepu", model.upn()); - Assertions.assertEquals("rywn", model.invitedUserDetails().invitedEmail()); - Assertions.assertEquals("oqftiyqzrnkcq", model.invitedUserDetails().authType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessInviteUserAccountModel model = new AccessInviteUserAccountModel().withOrganizationId("buxwgip") - .withEmail("onowk") - .withUpn("hwankixzbinjepu") - .withInvitedUserDetails( - new AccessInvitedUserDetails().withInvitedEmail("rywn").withAuthType("oqftiyqzrnkcq")); - model = BinaryData.fromObject(model).toObject(AccessInviteUserAccountModel.class); - Assertions.assertEquals("buxwgip", model.organizationId()); - Assertions.assertEquals("onowk", model.email()); - Assertions.assertEquals("hwankixzbinjepu", model.upn()); - Assertions.assertEquals("rywn", model.invitedUserDetails().invitedEmail()); - Assertions.assertEquals("oqftiyqzrnkcq", model.invitedUserDetails().authType()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserWithResponseMockTests.java deleted file mode 100644 index 7c4ae99ca93f..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInviteUserWithResponseMockTests.java +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessInviteUserAccountModel; -import com.azure.resourcemanager.confluent.models.AccessInvitedUserDetails; -import com.azure.resourcemanager.confluent.models.InvitationRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessInviteUserWithResponseMockTests { - @Test - public void testInviteUserWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"jlzqnhc\",\"id\":\"ql\",\"metadata\":{\"self\":\"oibgsxg\",\"resource_name\":\"fyq\",\"created_at\":\"mpqoxw\",\"updated_at\":\"fdbxiqxeiiqbim\",\"deleted_at\":\"mwwinhehfqpofv\"},\"email\":\"cblembnkbwv\",\"auth_type\":\"xk\",\"status\":\"vqihebwtswbzuwf\",\"accepted_at\":\"urageg\",\"expires_at\":\"vcjfelisdjubggb\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - InvitationRecord response = manager.access() - .inviteUserWithResponse("phai", "mxyasflvgsgzw", - new AccessInviteUserAccountModel().withOrganizationId("akoi") - .withEmail("nsmjbl") - .withUpn("jhlnymzotqyryu") - .withInvitedUserDetails( - new AccessInvitedUserDetails().withInvitedEmail("mqqvxmvwfgtay").withAuthType("nsup")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("jlzqnhc", response.kind()); - Assertions.assertEquals("ql", response.id()); - Assertions.assertEquals("oibgsxg", response.metadata().self()); - Assertions.assertEquals("fyq", response.metadata().resourceName()); - Assertions.assertEquals("mpqoxw", response.metadata().createdAt()); - Assertions.assertEquals("fdbxiqxeiiqbim", response.metadata().updatedAt()); - Assertions.assertEquals("mwwinhehfqpofv", response.metadata().deletedAt()); - Assertions.assertEquals("cblembnkbwv", response.email()); - Assertions.assertEquals("xk", response.authType()); - Assertions.assertEquals("vqihebwtswbzuwf", response.status()); - Assertions.assertEquals("urageg", response.acceptedAt()); - Assertions.assertEquals("vcjfelisdjubggb", response.expiresAt()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInvitedUserDetailsTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInvitedUserDetailsTests.java deleted file mode 100644 index 074da2b62487..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessInvitedUserDetailsTests.java +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.AccessInvitedUserDetails; -import org.junit.jupiter.api.Assertions; - -public final class AccessInvitedUserDetailsTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessInvitedUserDetails model - = BinaryData.fromString("{\"invitedEmail\":\"xlwhzlsicoh\",\"auth_type\":\"qnwvlrya\"}") - .toObject(AccessInvitedUserDetails.class); - Assertions.assertEquals("xlwhzlsicoh", model.invitedEmail()); - Assertions.assertEquals("qnwvlrya", model.authType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessInvitedUserDetails model - = new AccessInvitedUserDetails().withInvitedEmail("xlwhzlsicoh").withAuthType("qnwvlrya"); - model = BinaryData.fromObject(model).toObject(AccessInvitedUserDetails.class); - Assertions.assertEquals("xlwhzlsicoh", model.invitedEmail()); - Assertions.assertEquals("qnwvlrya", model.authType()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListClusterSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListClusterSuccessResponseInnerTests.java deleted file mode 100644 index 05988275a95c..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListClusterSuccessResponseInnerTests.java +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.AccessListClusterSuccessResponseInner; -import com.azure.resourcemanager.confluent.models.ClusterByokEntity; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import com.azure.resourcemanager.confluent.models.ClusterEnvironmentEntity; -import com.azure.resourcemanager.confluent.models.ClusterNetworkEntity; -import com.azure.resourcemanager.confluent.models.ClusterRecord; -import com.azure.resourcemanager.confluent.models.ClusterSpecEntity; -import com.azure.resourcemanager.confluent.models.ClusterStatusEntity; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class AccessListClusterSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessListClusterSuccessResponseInner model = BinaryData.fromString( - "{\"kind\":\"ebf\",\"metadata\":{\"first\":\"rbu\",\"last\":\"cvpnazzmhjrunmpx\",\"prev\":\"dbhrbnlankxm\",\"next\":\"k\",\"total_size\":1438226066},\"data\":[{\"kind\":\"tkcxywnytnrsy\",\"id\":\"qidybyx\",\"metadata\":{\"self\":\"clha\",\"resource_name\":\"dbabp\",\"created_at\":\"wrqlfktsthsuco\",\"updated_at\":\"nyyazttbtwwrqpue\",\"deleted_at\":\"kzywbiex\"},\"display_name\":\"eyueaxibxujwb\",\"spec\":{\"display_name\":\"almuzyoxaepdkzja\",\"availability\":\"ux\",\"cloud\":\"d\",\"zone\":\"avxbniwdjswztsdb\",\"region\":\"nxytxh\",\"kafka_bootstrap_endpoint\":\"xbzpfzab\",\"http_endpoint\":\"cuh\",\"api_endpoint\":\"tcty\",\"config\":{\"kind\":\"lbbovplw\"},\"environment\":{\"id\":\"vgyuguos\",\"environment\":\"kfssxqukkf\",\"related\":\"gmgsxnkjzkde\",\"resource_name\":\"pvlopwiyighxpkd\"},\"network\":{\"id\":\"aiuebbaumnyqu\",\"environment\":\"deoj\",\"related\":\"bckhsmtxpsi\",\"resource_name\":\"tfhvpesapskrdqmh\"},\"byok\":{\"id\":\"htldwk\",\"related\":\"xuutkncwscwsv\",\"resource_name\":\"otogtwrupqs\"}},\"status\":{\"phase\":\"micykvceoveilo\",\"cku\":660644635}},{\"kind\":\"yfjfcnjbkcn\",\"id\":\"hbttkphyw\",\"metadata\":{\"self\":\"jtoqne\",\"resource_name\":\"clfp\",\"created_at\":\"hoxus\",\"updated_at\":\"pabgyeps\",\"deleted_at\":\"tazqugxywpmueefj\"},\"display_name\":\"fqkquj\",\"spec\":{\"display_name\":\"uyonobglaoc\",\"availability\":\"tcc\",\"cloud\":\"yudxytlmoy\",\"zone\":\"vwfudwpzntxhd\",\"region\":\"lrqjbhckfr\",\"kafka_bootstrap_endpoint\":\"rxsbkyvp\",\"http_endpoint\":\"anuzbpzkafkuw\",\"api_endpoint\":\"rnwb\",\"config\":{\"kind\":\"hseyvju\"},\"environment\":{\"id\":\"slhs\",\"environment\":\"deemao\",\"related\":\"xagkvtmelmqkrh\",\"resource_name\":\"vljua\"},\"network\":{\"id\":\"uhcdhm\",\"environment\":\"alaexqpvfadmwsrc\",\"related\":\"vxpvgomz\",\"resource_name\":\"misgwbnb\"},\"byok\":{\"id\":\"dawkzbali\",\"related\":\"rqhakauha\",\"resource_name\":\"sfwxosowzxc\"}},\"status\":{\"phase\":\"cjooxdjebwpucwwf\",\"cku\":1446689376}},{\"kind\":\"vmeueci\",\"id\":\"hzceuojgjrwjue\",\"metadata\":{\"self\":\"wmcdytdxwi\",\"resource_name\":\"nrjawgqwg\",\"created_at\":\"ni\",\"updated_at\":\"x\",\"deleted_at\":\"kpycgklwndnhjd\"},\"display_name\":\"whvylw\",\"spec\":{\"display_name\":\"dhxujznbmpo\",\"availability\":\"wpr\",\"cloud\":\"lve\",\"zone\":\"lupj\",\"region\":\"hfxobbcswsrtj\",\"kafka_bootstrap_endpoint\":\"plrbpbewtghf\",\"http_endpoint\":\"lc
gwxzvlvqh\",\"api_endpoint\":\"begibtnmxiebwwa\",\"config\":{\"kind\":\"yqcgwrtzjuzgwy\"},\"environment\":{\"id\":\"txon\",\"environment\":\"ts\",\"related\":\"jcbpwxqpsrknft\",\"resource_name\":\"vriuhprwmdyvx\"},\"network\":{\"id\":\"yriwwroyqb\",\"environment\":\"rmcqiby\",\"related\":\"ojvknmefqsgzvaha\",\"resource_name\":\"y\"},\"byok\":{\"id\":\"vgqzcjrvxd\",\"related\":\"lmwlxkvugfhzo\",\"resource_name\":\"wjvzunluthnn\"}},\"status\":{\"phase\":\"xipeilpjzuaejx\",\"cku\":122222845}},{\"kind\":\"skzbb\",\"id\":\"zumveekgpwo\",\"metadata\":{\"self\":\"kfpbs\",\"resource_name\":\"ofd\",\"created_at\":\"uusdttouwa\",\"updated_at\":\"ekqvkeln\",\"deleted_at\":\"vbxwyjsflhh\"},\"display_name\":\"aln\",\"spec\":{\"display_name\":\"isxyawjoyaqcslyj\",\"availability\":\"iidzyexzne\",\"cloud\":\"xhnrztfolhb\",\"zone\":\"knalaulppg\",\"region\":\"tpnapnyiropuhpig\",\"kafka_bootstrap_endpoint\":\"gylgqgitxmedjvcs\",\"http_endpoint\":\"n\",\"api_endpoint\":\"wncwzzhxgktrmg\",\"config\":{\"kind\":\"apkteoellwptfdyg\"},\"environment\":{\"id\":\"b\",\"environment\":\"ceopzfqrhhuaopp\",\"related\":\"qeqxo\",\"resource_name\":\"dahzxctobg\"},\"network\":{\"id\":\"moizpos\",\"environment\":\"grcfb\",\"related\":\"rmfqjhhkxbpvj\",\"resource_name\":\"jhxxjyn\"},\"byok\":{\"id\":\"ivkrtsw\",\"related\":\"qzvszjf\",\"resource_name\":\"vjfdx\"}},\"status\":{\"phase\":\"e\",\"cku\":43385515}}]}") - .toObject(AccessListClusterSuccessResponseInner.class); - Assertions.assertEquals("ebf", model.kind()); - Assertions.assertEquals("rbu", model.metadata().first()); - Assertions.assertEquals("cvpnazzmhjrunmpx", model.metadata().last()); - Assertions.assertEquals("dbhrbnlankxm", model.metadata().prev()); - Assertions.assertEquals("k", model.metadata().next()); - Assertions.assertEquals(1438226066, model.metadata().totalSize()); - Assertions.assertEquals("tkcxywnytnrsy", model.data().get(0).kind()); - Assertions.assertEquals("qidybyx", model.data().get(0).id()); - Assertions.assertEquals("clha", model.data().get(0).metadata().self()); - Assertions.assertEquals("dbabp", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("wrqlfktsthsuco", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("nyyazttbtwwrqpue", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("kzywbiex", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("eyueaxibxujwb", model.data().get(0).displayName()); - Assertions.assertEquals("almuzyoxaepdkzja", model.data().get(0).spec().displayName()); - Assertions.assertEquals("ux", model.data().get(0).spec().availability()); - Assertions.assertEquals("d", model.data().get(0).spec().cloud()); - Assertions.assertEquals("avxbniwdjswztsdb", model.data().get(0).spec().zone()); - Assertions.assertEquals("nxytxh", model.data().get(0).spec().region()); - Assertions.assertEquals("xbzpfzab", model.data().get(0).spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("cuh", model.data().get(0).spec().httpEndpoint()); - Assertions.assertEquals("tcty", model.data().get(0).spec().apiEndpoint()); - Assertions.assertEquals("lbbovplw", model.data().get(0).spec().config().kind()); - Assertions.assertEquals("vgyuguos", model.data().get(0).spec().environment().id()); - Assertions.assertEquals("kfssxqukkf", model.data().get(0).spec().environment().environment()); - Assertions.assertEquals("gmgsxnkjzkde", model.data().get(0).spec().environment().related()); - Assertions.assertEquals("pvlopwiyighxpkd", 
model.data().get(0).spec().environment().resourceName()); - Assertions.assertEquals("aiuebbaumnyqu", model.data().get(0).spec().network().id()); - Assertions.assertEquals("deoj", model.data().get(0).spec().network().environment()); - Assertions.assertEquals("bckhsmtxpsi", model.data().get(0).spec().network().related()); - Assertions.assertEquals("tfhvpesapskrdqmh", model.data().get(0).spec().network().resourceName()); - Assertions.assertEquals("htldwk", model.data().get(0).spec().byok().id()); - Assertions.assertEquals("xuutkncwscwsv", model.data().get(0).spec().byok().related()); - Assertions.assertEquals("otogtwrupqs", model.data().get(0).spec().byok().resourceName()); - Assertions.assertEquals("micykvceoveilo", model.data().get(0).status().phase()); - Assertions.assertEquals(660644635, model.data().get(0).status().cku()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessListClusterSuccessResponseInner model = new AccessListClusterSuccessResponseInner().withKind("ebf") - .withMetadata(new ConfluentListMetadata().withFirst("rbu") - .withLast("cvpnazzmhjrunmpx") - .withPrev("dbhrbnlankxm") - .withNext("k") - .withTotalSize(1438226066)) - .withData(Arrays.asList( - new ClusterRecord().withKind("tkcxywnytnrsy") - .withId("qidybyx") - .withMetadata(new MetadataEntity().withSelf("clha") - .withResourceName("dbabp") - .withCreatedAt("wrqlfktsthsuco") - .withUpdatedAt("nyyazttbtwwrqpue") - .withDeletedAt("kzywbiex")) - .withDisplayName("eyueaxibxujwb") - .withSpec(new ClusterSpecEntity().withDisplayName("almuzyoxaepdkzja") - .withAvailability("ux") - .withCloud("d") - .withZone("avxbniwdjswztsdb") - .withRegion("nxytxh") - .withKafkaBootstrapEndpoint("xbzpfzab") - .withHttpEndpoint("cuh") - .withApiEndpoint("tcty") - .withConfig(new ClusterConfigEntity().withKind("lbbovplw")) - .withEnvironment(new ClusterEnvironmentEntity().withId("vgyuguos") - .withEnvironment("kfssxqukkf") - .withRelated("gmgsxnkjzkde") - .withResourceName("pvlopwiyighxpkd")) - .withNetwork(new ClusterNetworkEntity().withId("aiuebbaumnyqu") - .withEnvironment("deoj") - .withRelated("bckhsmtxpsi") - .withResourceName("tfhvpesapskrdqmh")) - .withByok(new ClusterByokEntity().withId("htldwk") - .withRelated("xuutkncwscwsv") - .withResourceName("otogtwrupqs"))) - .withStatus(new ClusterStatusEntity().withPhase("micykvceoveilo").withCku(660644635)), - new ClusterRecord().withKind("yfjfcnjbkcn") - .withId("hbttkphyw") - .withMetadata(new MetadataEntity().withSelf("jtoqne") - .withResourceName("clfp") - .withCreatedAt("hoxus") - .withUpdatedAt("pabgyeps") - .withDeletedAt("tazqugxywpmueefj")) - .withDisplayName("fqkquj") - .withSpec(new ClusterSpecEntity().withDisplayName("uyonobglaoc") - .withAvailability("tcc") - .withCloud("yudxytlmoy") - .withZone("vwfudwpzntxhd") - .withRegion("lrqjbhckfr") - .withKafkaBootstrapEndpoint("rxsbkyvp") - .withHttpEndpoint("anuzbpzkafkuw") - .withApiEndpoint("rnwb") - .withConfig(new ClusterConfigEntity().withKind("hseyvju")) - .withEnvironment(new ClusterEnvironmentEntity().withId("slhs") - .withEnvironment("deemao") - .withRelated("xagkvtmelmqkrh") - .withResourceName("vljua")) - .withNetwork(new ClusterNetworkEntity().withId("uhcdhm") - .withEnvironment("alaexqpvfadmwsrc") - .withRelated("vxpvgomz") - .withResourceName("misgwbnb")) - .withByok(new ClusterByokEntity().withId("dawkzbali") - .withRelated("rqhakauha") - .withResourceName("sfwxosowzxc"))) - .withStatus(new ClusterStatusEntity().withPhase("cjooxdjebwpucwwf").withCku(1446689376)), - new 
ClusterRecord().withKind("vmeueci") - .withId("hzceuojgjrwjue") - .withMetadata(new MetadataEntity().withSelf("wmcdytdxwi") - .withResourceName("nrjawgqwg") - .withCreatedAt("ni") - .withUpdatedAt("x") - .withDeletedAt("kpycgklwndnhjd")) - .withDisplayName("whvylw") - .withSpec(new ClusterSpecEntity().withDisplayName("dhxujznbmpo") - .withAvailability("wpr") - .withCloud("lve") - .withZone("lupj") - .withRegion("hfxobbcswsrtj") - .withKafkaBootstrapEndpoint("plrbpbewtghf") - .withHttpEndpoint("lcgwxzvlvqh") - .withApiEndpoint("begibtnmxiebwwa") - .withConfig(new ClusterConfigEntity().withKind("yqcgwrtzjuzgwy")) - .withEnvironment(new ClusterEnvironmentEntity().withId("txon") - .withEnvironment("ts") - .withRelated("jcbpwxqpsrknft") - .withResourceName("vriuhprwmdyvx")) - .withNetwork(new ClusterNetworkEntity().withId("yriwwroyqb") - .withEnvironment("rmcqiby") - .withRelated("ojvknmefqsgzvaha") - .withResourceName("y")) - .withByok(new ClusterByokEntity().withId("vgqzcjrvxd") - .withRelated("lmwlxkvugfhzo") - .withResourceName("wjvzunluthnn"))) - .withStatus(new ClusterStatusEntity().withPhase("xipeilpjzuaejx").withCku(122222845)), - new ClusterRecord().withKind("skzbb") - .withId("zumveekgpwo") - .withMetadata(new MetadataEntity().withSelf("kfpbs") - .withResourceName("ofd") - .withCreatedAt("uusdttouwa") - .withUpdatedAt("ekqvkeln") - .withDeletedAt("vbxwyjsflhh")) - .withDisplayName("aln") - .withSpec(new ClusterSpecEntity().withDisplayName("isxyawjoyaqcslyj") - .withAvailability("iidzyexzne") - .withCloud("xhnrztfolhb") - .withZone("knalaulppg") - .withRegion("tpnapnyiropuhpig") - .withKafkaBootstrapEndpoint("gylgqgitxmedjvcs") - .withHttpEndpoint("n") - .withApiEndpoint("wncwzzhxgktrmg") - .withConfig(new ClusterConfigEntity().withKind("apkteoellwptfdyg")) - .withEnvironment(new ClusterEnvironmentEntity().withId("b") - .withEnvironment("ceopzfqrhhuaopp") - .withRelated("qeqxo") - .withResourceName("dahzxctobg")) - .withNetwork(new ClusterNetworkEntity().withId("moizpos") - .withEnvironment("grcfb") - .withRelated("rmfqjhhkxbpvj") - .withResourceName("jhxxjyn")) - .withByok( - new ClusterByokEntity().withId("ivkrtsw").withRelated("qzvszjf").withResourceName("vjfdx"))) - .withStatus(new ClusterStatusEntity().withPhase("e").withCku(43385515)))); - model = BinaryData.fromObject(model).toObject(AccessListClusterSuccessResponseInner.class); - Assertions.assertEquals("ebf", model.kind()); - Assertions.assertEquals("rbu", model.metadata().first()); - Assertions.assertEquals("cvpnazzmhjrunmpx", model.metadata().last()); - Assertions.assertEquals("dbhrbnlankxm", model.metadata().prev()); - Assertions.assertEquals("k", model.metadata().next()); - Assertions.assertEquals(1438226066, model.metadata().totalSize()); - Assertions.assertEquals("tkcxywnytnrsy", model.data().get(0).kind()); - Assertions.assertEquals("qidybyx", model.data().get(0).id()); - Assertions.assertEquals("clha", model.data().get(0).metadata().self()); - Assertions.assertEquals("dbabp", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("wrqlfktsthsuco", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("nyyazttbtwwrqpue", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("kzywbiex", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("eyueaxibxujwb", model.data().get(0).displayName()); - Assertions.assertEquals("almuzyoxaepdkzja", model.data().get(0).spec().displayName()); - Assertions.assertEquals("ux", 
model.data().get(0).spec().availability()); - Assertions.assertEquals("d", model.data().get(0).spec().cloud()); - Assertions.assertEquals("avxbniwdjswztsdb", model.data().get(0).spec().zone()); - Assertions.assertEquals("nxytxh", model.data().get(0).spec().region()); - Assertions.assertEquals("xbzpfzab", model.data().get(0).spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("cuh", model.data().get(0).spec().httpEndpoint()); - Assertions.assertEquals("tcty", model.data().get(0).spec().apiEndpoint()); - Assertions.assertEquals("lbbovplw", model.data().get(0).spec().config().kind()); - Assertions.assertEquals("vgyuguos", model.data().get(0).spec().environment().id()); - Assertions.assertEquals("kfssxqukkf", model.data().get(0).spec().environment().environment()); - Assertions.assertEquals("gmgsxnkjzkde", model.data().get(0).spec().environment().related()); - Assertions.assertEquals("pvlopwiyighxpkd", model.data().get(0).spec().environment().resourceName()); - Assertions.assertEquals("aiuebbaumnyqu", model.data().get(0).spec().network().id()); - Assertions.assertEquals("deoj", model.data().get(0).spec().network().environment()); - Assertions.assertEquals("bckhsmtxpsi", model.data().get(0).spec().network().related()); - Assertions.assertEquals("tfhvpesapskrdqmh", model.data().get(0).spec().network().resourceName()); - Assertions.assertEquals("htldwk", model.data().get(0).spec().byok().id()); - Assertions.assertEquals("xuutkncwscwsv", model.data().get(0).spec().byok().related()); - Assertions.assertEquals("otogtwrupqs", model.data().get(0).spec().byok().resourceName()); - Assertions.assertEquals("micykvceoveilo", model.data().get(0).status().phase()); - Assertions.assertEquals(660644635, model.data().get(0).status().cku()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListClustersWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListClustersWithResponseMockTests.java deleted file mode 100644 index 003ff95c8c38..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListClustersWithResponseMockTests.java +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessListClusterSuccessResponse; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessListClustersWithResponseMockTests { - @Test - public void testListClustersWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"iqg\",\"metadata\":{\"first\":\"okzrus\",\"last\":\"vhczznvfby\",\"prev\":\"sxjwwixz\",\"next\":\"mwmxqhndvnoamld\",\"total_size\":1759834581},\"data\":[{\"kind\":\"djh\",\"id\":\"lzok\",\"metadata\":{\"self\":\"xpelnjetagltsx\",\"resource_name\":\"tft\",\"created_at\":\"pnpbswveflocc\",\"updated_at\":\"mozi\",\"deleted_at\":\"ipgawtxx\"},\"display_name\":\"y\",\"spec\":{\"display_name\":\"cjxgrytf\",\"availability\":\"cy\",\"cloud\":\"lrmcaykg\",\"zone\":\"oxuztrksx\",\"region\":\"ndfcpfn\",\"kafka_bootstrap_endpoint\":\"thjtwk\",\"http_endpoint\":\"osrxuzvoa\",\"api_endpoint\":\"tcqiosmg\",\"config\":{\"kind\":\"hgxqdlyrt\"},\"environment\":{\"id\":\"ap\",\"environment\":\"tz\",\"related\":\"tbhjmznnbsoqe\",\"resource_name\":\"larvlagunbtg\"},\"network\":{\"id\":\"wlnbm\",\"environment\":\"reeudzqavb\",\"related\":\"qmjxlyyzglgouwtl\",\"resource_name\":\"jyuojqtobaxkjeyt\"},\"byok\":{\"id\":\"bfjkw\",\"related\":\"snkq\",\"resource_name\":\"syrq\"}},\"status\":{\"phase\":\"qhd\",\"cku\":2136418443}},{\"kind\":\"ulkpakd\",\"id\":\"fmjnnawtqa\",\"metadata\":{\"self\":\"uckpggqoweyir\",\"resource_name\":\"lisn\",\"created_at\":\"fl\",\"updated_at\":\"mpizru\",\"deleted_at\":\"pqxpx\"},\"display_name\":\"fcngjsa\",\"spec\":{\"display_name\":\"ixtmkzjvkviirhgf\",\"availability\":\"wsdpgratzvzb\",\"cloud\":\"byvi\",\"zone\":\"ctbrxkjzwrgxffm\",\"region\":\"kwfbkgo\",\"kafka_bootstrap_endpoint\":\"wopdbydpiz\",\"http_endpoint\":\"clnapxbiygnugjkn\",\"api_endpoint\":\"mfcttux\",\"config\":{\"kind\":\"i\"},\"environment\":{\"id\":\"qoiquvrehmrnjhv\",\"environment\":\"jztczytqj\",\"related\":\"h\",\"resource_name\":\"unfprnjletlxs\"},\"network\":{\"id\":\"ddoui\",\"environment\":\"mowaziynknlqwzdv\",\"related\":\"w\",\"resource_name\":\"qszdtmaajquhuxyl\"},\"byok\":{\"id\":\"m\",\"related\":\"g\",\"resource_name\":\"mzyospspshck\"}},\"status\":{\"phase\":\"jpmspbpssdfppy\",\"cku\":1546979917}},{\"kind\":\"eyujtvczkcnyxrx\",\"id\":\"njdxvglnkvxl\",\"metadata\":{\"self\":\"glqivbgkcv\",\"resource_name\":\"pzvuqdflvo\",\"created_at\":\"yp\",\"updated_at\":\"ubcpzgpxti\",\"deleted_at\":\"j\"},\"display_name\":\"idibgqjxgpn\",\"spec\":{\"display_name\":\"ov\",\"availability\":\"pikqmh\",\"cloud\":\"owjrmzvuporqz\",\"zone\":\"uydzvk\",\"region\":\"xcnqmxqpswokmvkh\",\"kafka_bootstrap_endpoint\":\"gdhbe\",\"http_endpoint\":\"qkzszuwiwtglxxh\",\"api_endpoint\":\"fpgpicrmnzhrgm\",\"config\":{\"kind\":\"sxvpqcbfrmbodths\"},\"environment\":{\"id\":\"vriibakclacjfr\",\"environment\":\"ousxauzlwvsgmw\",\"related\":\"qf\",\"resource_name\":\"zvuxm\"},\"network\":{\"id\":\"svth\",\"envir
onment\":\"pz\",\"related\":\"kovmribiatt\",\"resource_name\":\"lu\"},\"byok\":{\"id\":\"tangcfhnykzcu\",\"related\":\"wvxwlmzqwmvt\",\"resource_name\":\"jmxmcuqud\"}},\"status\":{\"phase\":\"clxyn\",\"cku\":267466114}}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AccessListClusterSuccessResponse response = manager.access() - .listClustersWithResponse("izcil", "ghgshejjtbxqmu", - new ListAccessRequestModel().withSearchFilters(mapOf("rsbycucrwn", "lxqzvn", "qbsms", "mikzeb")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("iqg", response.kind()); - Assertions.assertEquals("okzrus", response.metadata().first()); - Assertions.assertEquals("vhczznvfby", response.metadata().last()); - Assertions.assertEquals("sxjwwixz", response.metadata().prev()); - Assertions.assertEquals("mwmxqhndvnoamld", response.metadata().next()); - Assertions.assertEquals(1759834581, response.metadata().totalSize()); - Assertions.assertEquals("djh", response.data().get(0).kind()); - Assertions.assertEquals("lzok", response.data().get(0).id()); - Assertions.assertEquals("xpelnjetagltsx", response.data().get(0).metadata().self()); - Assertions.assertEquals("tft", response.data().get(0).metadata().resourceName()); - Assertions.assertEquals("pnpbswveflocc", response.data().get(0).metadata().createdAt()); - Assertions.assertEquals("mozi", response.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("ipgawtxx", response.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("y", response.data().get(0).displayName()); - Assertions.assertEquals("cjxgrytf", response.data().get(0).spec().displayName()); - Assertions.assertEquals("cy", response.data().get(0).spec().availability()); - Assertions.assertEquals("lrmcaykg", response.data().get(0).spec().cloud()); - Assertions.assertEquals("oxuztrksx", response.data().get(0).spec().zone()); - Assertions.assertEquals("ndfcpfn", response.data().get(0).spec().region()); - Assertions.assertEquals("thjtwk", response.data().get(0).spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("osrxuzvoa", response.data().get(0).spec().httpEndpoint()); - Assertions.assertEquals("tcqiosmg", response.data().get(0).spec().apiEndpoint()); - Assertions.assertEquals("hgxqdlyrt", response.data().get(0).spec().config().kind()); - Assertions.assertEquals("ap", response.data().get(0).spec().environment().id()); - Assertions.assertEquals("tz", response.data().get(0).spec().environment().environment()); - Assertions.assertEquals("tbhjmznnbsoqe", response.data().get(0).spec().environment().related()); - Assertions.assertEquals("larvlagunbtg", response.data().get(0).spec().environment().resourceName()); - Assertions.assertEquals("wlnbm", response.data().get(0).spec().network().id()); - Assertions.assertEquals("reeudzqavb", response.data().get(0).spec().network().environment()); - Assertions.assertEquals("qmjxlyyzglgouwtl", response.data().get(0).spec().network().related()); - Assertions.assertEquals("jyuojqtobaxkjeyt", response.data().get(0).spec().network().resourceName()); - Assertions.assertEquals("bfjkw", response.data().get(0).spec().byok().id()); - Assertions.assertEquals("snkq", 
response.data().get(0).spec().byok().related()); - Assertions.assertEquals("syrq", response.data().get(0).spec().byok().resourceName()); - Assertions.assertEquals("qhd", response.data().get(0).status().phase()); - Assertions.assertEquals(2136418443, response.data().get(0).status().cku()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static <T> Map<String, T> mapOf(Object... inputs) { - Map<String, T> map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsSuccessResponseInnerTests.java deleted file mode 100644 index 1a2ee2d3aaf8..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsSuccessResponseInnerTests.java +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.AccessListEnvironmentsSuccessResponseInner; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import com.azure.resourcemanager.confluent.models.EnvironmentRecord; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class AccessListEnvironmentsSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessListEnvironmentsSuccessResponseInner model = BinaryData.fromString( - "{\"kind\":\"hheunmmqhgyx\",\"metadata\":{\"first\":\"noc\",\"last\":\"oklyaxuconuq\",\"prev\":\"fkbey\",\"next\":\"wrmjmwvvjektc\",\"total_size\":1803945661},\"data\":[{\"kind\":\"lrsf\",\"id\":\"zpwv\",\"metadata\":{\"self\":\"q\",\"resource_name\":\"iqylihkaetck\",\"created_at\":\"fcivfsnkym\",\"updated_at\":\"tqhjfbebrjcx\",\"deleted_at\":\"fuwutttxf\"},\"display_name\":\"rbirphxe\"},{\"kind\":\"yva\",\"id\":\"nljky\",\"metadata\":{\"self\":\"vuujq\",\"resource_name\":\"dokgjl\",\"created_at\":\"oxgvclt\",\"updated_at\":\"sncghkjeszz\",\"deleted_at\":\"ijhtxf\"},\"display_name\":\"xbf\"}]}") - .toObject(AccessListEnvironmentsSuccessResponseInner.class); - Assertions.assertEquals("hheunmmqhgyx", model.kind()); - Assertions.assertEquals("noc", model.metadata().first()); - Assertions.assertEquals("oklyaxuconuq", model.metadata().last()); - Assertions.assertEquals("fkbey", model.metadata().prev()); - Assertions.assertEquals("wrmjmwvvjektc", model.metadata().next()); - Assertions.assertEquals(1803945661, model.metadata().totalSize()); - Assertions.assertEquals("lrsf", model.data().get(0).kind()); - Assertions.assertEquals("zpwv", model.data().get(0).id()); - Assertions.assertEquals("q", model.data().get(0).metadata().self()); - Assertions.assertEquals("iqylihkaetck", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("fcivfsnkym", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("tqhjfbebrjcx", 
model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("fuwutttxf", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("rbirphxe", model.data().get(0).displayName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessListEnvironmentsSuccessResponseInner model - = new AccessListEnvironmentsSuccessResponseInner().withKind("hheunmmqhgyx") - .withMetadata(new ConfluentListMetadata().withFirst("noc") - .withLast("oklyaxuconuq") - .withPrev("fkbey") - .withNext("wrmjmwvvjektc") - .withTotalSize(1803945661)) - .withData(Arrays.asList( - new EnvironmentRecord().withKind("lrsf") - .withId("zpwv") - .withMetadata(new MetadataEntity().withSelf("q") - .withResourceName("iqylihkaetck") - .withCreatedAt("fcivfsnkym") - .withUpdatedAt("tqhjfbebrjcx") - .withDeletedAt("fuwutttxf")) - .withDisplayName("rbirphxe"), - new EnvironmentRecord().withKind("yva") - .withId("nljky") - .withMetadata(new MetadataEntity().withSelf("vuujq") - .withResourceName("dokgjl") - .withCreatedAt("oxgvclt") - .withUpdatedAt("sncghkjeszz") - .withDeletedAt("ijhtxf")) - .withDisplayName("xbf"))); - model = BinaryData.fromObject(model).toObject(AccessListEnvironmentsSuccessResponseInner.class); - Assertions.assertEquals("hheunmmqhgyx", model.kind()); - Assertions.assertEquals("noc", model.metadata().first()); - Assertions.assertEquals("oklyaxuconuq", model.metadata().last()); - Assertions.assertEquals("fkbey", model.metadata().prev()); - Assertions.assertEquals("wrmjmwvvjektc", model.metadata().next()); - Assertions.assertEquals(1803945661, model.metadata().totalSize()); - Assertions.assertEquals("lrsf", model.data().get(0).kind()); - Assertions.assertEquals("zpwv", model.data().get(0).id()); - Assertions.assertEquals("q", model.data().get(0).metadata().self()); - Assertions.assertEquals("iqylihkaetck", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("fcivfsnkym", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("tqhjfbebrjcx", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("fuwutttxf", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("rbirphxe", model.data().get(0).displayName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsWithResponseMockTests.java deleted file mode 100644 index 4e2adcecf456..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListEnvironmentsWithResponseMockTests.java +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessListEnvironmentsSuccessResponse; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessListEnvironmentsWithResponseMockTests { - @Test - public void testListEnvironmentsWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"tkvnlvxbcuiiznkt\",\"metadata\":{\"first\":\"nsnvpd\",\"last\":\"mik\",\"prev\":\"tbzbkiwbuqnyophz\",\"next\":\"l\",\"total_size\":348954725},\"data\":[{\"kind\":\"bcunezzceze\",\"id\":\"w\",\"metadata\":{\"self\":\"lwxjwetn\",\"resource_name\":\"ihclafzv\",\"created_at\":\"lpt\",\"updated_at\":\"qqwzt\",\"deleted_at\":\"w\"},\"display_name\":\"chcxwaxfewzj\"},{\"kind\":\"exfdeqvhpsylk\",\"id\":\"hkbffmbm\",\"metadata\":{\"self\":\"rgywwp\",\"resource_name\":\"xs\",\"created_at\":\"tf\",\"updated_at\":\"gicgaaoepttaq\",\"deleted_at\":\"dewemxswv\"},\"display_name\":\"unzzjgehk\"},{\"kind\":\"imrt\",\"id\":\"okffqyinljqepqwh\",\"metadata\":{\"self\":\"onsts\",\"resource_name\":\"yxgvelfcld\",\"created_at\":\"cb\",\"updated_at\":\"ds\",\"deleted_at\":\"wcobie\"},\"display_name\":\"tmninw\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AccessListEnvironmentsSuccessResponse response = manager.access() - .listEnvironmentsWithResponse("igkxkbsazga", "gacyrcmjdmspo", - new ListAccessRequestModel().withSearchFilters(mapOf("frzgbzjed", "vuhrylni")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("tkvnlvxbcuiiznkt", response.kind()); - Assertions.assertEquals("nsnvpd", response.metadata().first()); - Assertions.assertEquals("mik", response.metadata().last()); - Assertions.assertEquals("tbzbkiwbuqnyophz", response.metadata().prev()); - Assertions.assertEquals("l", response.metadata().next()); - Assertions.assertEquals(348954725, response.metadata().totalSize()); - Assertions.assertEquals("bcunezzceze", response.data().get(0).kind()); - Assertions.assertEquals("w", response.data().get(0).id()); - Assertions.assertEquals("lwxjwetn", response.data().get(0).metadata().self()); - Assertions.assertEquals("ihclafzv", response.data().get(0).metadata().resourceName()); - Assertions.assertEquals("lpt", response.data().get(0).metadata().createdAt()); - Assertions.assertEquals("qqwzt", response.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("w", response.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("chcxwaxfewzj", response.data().get(0).displayName()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsSuccessResponseInnerTests.java deleted file mode 100644 index 756f7aaebfa1..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsSuccessResponseInnerTests.java +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.AccessListInvitationsSuccessResponseInner; -import com.azure.resourcemanager.confluent.fluent.models.InvitationRecordInner; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class AccessListInvitationsSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessListInvitationsSuccessResponseInner model = BinaryData.fromString( - "{\"kind\":\"okacspk\",\"metadata\":{\"first\":\"zdobpxjmflbvvnch\",\"last\":\"cciw\",\"prev\":\"juqk\",\"next\":\"sa\",\"total_size\":1410521625},\"data\":[{\"kind\":\"foskghsauuimj\",\"id\":\"xieduugidyjrr\",\"metadata\":{\"self\":\"aos\",\"resource_name\":\"xc\",\"created_at\":\"npc\",\"updated_at\":\"ocohslkevleg\",\"deleted_at\":\"fbuhfmvfaxkffe\"},\"email\":\"th\",\"auth_type\":\"m\",\"status\":\"yvshxmz\",\"accepted_at\":\"bzoggigrx\",\"expires_at\":\"ur\"}]}") - .toObject(AccessListInvitationsSuccessResponseInner.class); - Assertions.assertEquals("okacspk", model.kind()); - Assertions.assertEquals("zdobpxjmflbvvnch", model.metadata().first()); - Assertions.assertEquals("cciw", model.metadata().last()); - Assertions.assertEquals("juqk", model.metadata().prev()); - Assertions.assertEquals("sa", model.metadata().next()); - Assertions.assertEquals(1410521625, model.metadata().totalSize()); - Assertions.assertEquals("foskghsauuimj", model.data().get(0).kind()); - Assertions.assertEquals("xieduugidyjrr", model.data().get(0).id()); - Assertions.assertEquals("aos", model.data().get(0).metadata().self()); - Assertions.assertEquals("xc", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("npc", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("ocohslkevleg", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("fbuhfmvfaxkffe", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("th", model.data().get(0).email()); - Assertions.assertEquals("m", model.data().get(0).authType()); - Assertions.assertEquals("yvshxmz", model.data().get(0).status()); - Assertions.assertEquals("bzoggigrx", model.data().get(0).acceptedAt()); - Assertions.assertEquals("ur", model.data().get(0).expiresAt()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - 
AccessListInvitationsSuccessResponseInner model - = new AccessListInvitationsSuccessResponseInner().withKind("okacspk") - .withMetadata(new ConfluentListMetadata().withFirst("zdobpxjmflbvvnch") - .withLast("cciw") - .withPrev("juqk") - .withNext("sa") - .withTotalSize(1410521625)) - .withData(Arrays.asList(new InvitationRecordInner().withKind("foskghsauuimj") - .withId("xieduugidyjrr") - .withMetadata(new MetadataEntity().withSelf("aos") - .withResourceName("xc") - .withCreatedAt("npc") - .withUpdatedAt("ocohslkevleg") - .withDeletedAt("fbuhfmvfaxkffe")) - .withEmail("th") - .withAuthType("m") - .withStatus("yvshxmz") - .withAcceptedAt("bzoggigrx") - .withExpiresAt("ur"))); - model = BinaryData.fromObject(model).toObject(AccessListInvitationsSuccessResponseInner.class); - Assertions.assertEquals("okacspk", model.kind()); - Assertions.assertEquals("zdobpxjmflbvvnch", model.metadata().first()); - Assertions.assertEquals("cciw", model.metadata().last()); - Assertions.assertEquals("juqk", model.metadata().prev()); - Assertions.assertEquals("sa", model.metadata().next()); - Assertions.assertEquals(1410521625, model.metadata().totalSize()); - Assertions.assertEquals("foskghsauuimj", model.data().get(0).kind()); - Assertions.assertEquals("xieduugidyjrr", model.data().get(0).id()); - Assertions.assertEquals("aos", model.data().get(0).metadata().self()); - Assertions.assertEquals("xc", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("npc", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("ocohslkevleg", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("fbuhfmvfaxkffe", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("th", model.data().get(0).email()); - Assertions.assertEquals("m", model.data().get(0).authType()); - Assertions.assertEquals("yvshxmz", model.data().get(0).status()); - Assertions.assertEquals("bzoggigrx", model.data().get(0).acceptedAt()); - Assertions.assertEquals("ur", model.data().get(0).expiresAt()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsWithResponseMockTests.java deleted file mode 100644 index 154d79c92272..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListInvitationsWithResponseMockTests.java +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessListInvitationsSuccessResponse; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessListInvitationsWithResponseMockTests { - @Test - public void testListInvitationsWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"jwogqqnobpudc\",\"metadata\":{\"first\":\"tqwpwya\",\"last\":\"zasqbucljgkyexao\",\"prev\":\"yaipidsda\",\"next\":\"tx\",\"total_size\":634071043},\"data\":[{\"kind\":\"qwazlnqnmcjngzq\",\"id\":\"xtbjwgnyfusfzsv\",\"metadata\":{\"self\":\"kzhajqglcfhm\",\"resource_name\":\"qryxyn\",\"created_at\":\"zrdpsovwxznptgoe\",\"updated_at\":\"bbabp\",\"deleted_at\":\"vf\"},\"email\":\"kvntjlrigjkskyri\",\"auth_type\":\"vzidsxwaab\",\"status\":\"ifrygzn\",\"accepted_at\":\"axri\",\"expires_at\":\"zob\"},{\"kind\":\"pxl\",\"id\":\"lnelxieixynl\",\"metadata\":{\"self\":\"cwcrojphs\",\"resource_name\":\"cawjutifdwfmvi\",\"created_at\":\"rqjb\",\"updated_at\":\"zhraglkafh\",\"deleted_at\":\"qjujeickpzvcp\"},\"email\":\"mxelnwcltyjed\",\"auth_type\":\"xm\",\"status\":\"mkqscaz\",\"accepted_at\":\"wxtzxpuamwab\",\"expires_at\":\"rvxcush\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AccessListInvitationsSuccessResponse response = manager.access() - .listInvitationsWithResponse("paq", "ferr", - new ListAccessRequestModel().withSearchFilters(mapOf("kmfx", "x")), com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("jwogqqnobpudc", response.kind()); - Assertions.assertEquals("tqwpwya", response.metadata().first()); - Assertions.assertEquals("zasqbucljgkyexao", response.metadata().last()); - Assertions.assertEquals("yaipidsda", response.metadata().prev()); - Assertions.assertEquals("tx", response.metadata().next()); - Assertions.assertEquals(634071043, response.metadata().totalSize()); - Assertions.assertEquals("qwazlnqnmcjngzq", response.data().get(0).kind()); - Assertions.assertEquals("xtbjwgnyfusfzsv", response.data().get(0).id()); - Assertions.assertEquals("kzhajqglcfhm", response.data().get(0).metadata().self()); - Assertions.assertEquals("qryxyn", response.data().get(0).metadata().resourceName()); - Assertions.assertEquals("zrdpsovwxznptgoe", response.data().get(0).metadata().createdAt()); - Assertions.assertEquals("bbabp", response.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("vf", response.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("kvntjlrigjkskyri", response.data().get(0).email()); - Assertions.assertEquals("vzidsxwaab", response.data().get(0).authType()); - 
Assertions.assertEquals("ifrygzn", response.data().get(0).status()); - Assertions.assertEquals("axri", response.data().get(0).acceptedAt()); - Assertions.assertEquals("zob", response.data().get(0).expiresAt()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingNameListWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingNameListWithResponseMockTests.java deleted file mode 100644 index 10a5e6508daa..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingNameListWithResponseMockTests.java +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessRoleBindingNameListSuccessResponse; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessListRoleBindingNameListWithResponseMockTests { - @Test - public void testListRoleBindingNameListWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"tuwkffdj\",\"metadata\":{\"first\":\"ysidfvclgl\",\"last\":\"fuijtkbus\",\"prev\":\"gsfikayiansha\",\"next\":\"jtjiqx\",\"total_size\":1119516110},\"data\":[\"ttvwkpqh\"]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AccessRoleBindingNameListSuccessResponse response = manager.access() - .listRoleBindingNameListWithResponse("kjbsah", "tdtpdelqacslmo", - new ListAccessRequestModel() - .withSearchFilters(mapOf("k", "bnfxofvc", "xejw", "dirazf", "jurbuhhlkyqltqsr", "bmdujtmvcopexc")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("tuwkffdj", response.kind()); - Assertions.assertEquals("ysidfvclgl", response.metadata().first()); - Assertions.assertEquals("fuijtkbus", response.metadata().last()); - Assertions.assertEquals("gsfikayiansha", response.metadata().prev()); - Assertions.assertEquals("jtjiqx", response.metadata().next()); - Assertions.assertEquals(1119516110, response.metadata().totalSize()); - 
Assertions.assertEquals("ttvwkpqh", response.data().get(0)); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsSuccessResponseInnerTests.java deleted file mode 100644 index d7dea7d511eb..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsSuccessResponseInnerTests.java +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.AccessListRoleBindingsSuccessResponseInner; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import com.azure.resourcemanager.confluent.models.RoleBindingRecord; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class AccessListRoleBindingsSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessListRoleBindingsSuccessResponseInner model = BinaryData.fromString( - "{\"kind\":\"bzdopcj\",\"metadata\":{\"first\":\"hdldwmgxcxrsl\",\"last\":\"utwu\",\"prev\":\"grpkhjwniyqs\",\"next\":\"i\",\"total_size\":1317619791},\"data\":[{\"kind\":\"zzlvmbmpaxmodfv\",\"id\":\"fy\",\"metadata\":{\"self\":\"pfvmwyhrfou\",\"resource_name\":\"taakc\",\"created_at\":\"iyzvqtmnub\",\"updated_at\":\"kpzksmondjmq\",\"deleted_at\":\"vypomgkopkwho\"},\"principal\":\"pajqgxysm\",\"role_name\":\"mbqfqvmk\",\"crn_pattern\":\"oz\"}]}") - .toObject(AccessListRoleBindingsSuccessResponseInner.class); - Assertions.assertEquals("bzdopcj", model.kind()); - Assertions.assertEquals("hdldwmgxcxrsl", model.metadata().first()); - Assertions.assertEquals("utwu", model.metadata().last()); - Assertions.assertEquals("grpkhjwniyqs", model.metadata().prev()); - Assertions.assertEquals("i", model.metadata().next()); - Assertions.assertEquals(1317619791, model.metadata().totalSize()); - Assertions.assertEquals("zzlvmbmpaxmodfv", model.data().get(0).kind()); - Assertions.assertEquals("fy", model.data().get(0).id()); - Assertions.assertEquals("pfvmwyhrfou", model.data().get(0).metadata().self()); - Assertions.assertEquals("taakc", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("iyzvqtmnub", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("kpzksmondjmq", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("vypomgkopkwho", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("pajqgxysm", model.data().get(0).principal()); - Assertions.assertEquals("mbqfqvmk", model.data().get(0).roleName()); - Assertions.assertEquals("oz", model.data().get(0).crnPattern()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() 
throws Exception { - AccessListRoleBindingsSuccessResponseInner model - = new AccessListRoleBindingsSuccessResponseInner().withKind("bzdopcj") - .withMetadata(new ConfluentListMetadata().withFirst("hdldwmgxcxrsl") - .withLast("utwu") - .withPrev("grpkhjwniyqs") - .withNext("i") - .withTotalSize(1317619791)) - .withData(Arrays.asList(new RoleBindingRecord().withKind("zzlvmbmpaxmodfv") - .withId("fy") - .withMetadata(new MetadataEntity().withSelf("pfvmwyhrfou") - .withResourceName("taakc") - .withCreatedAt("iyzvqtmnub") - .withUpdatedAt("kpzksmondjmq") - .withDeletedAt("vypomgkopkwho")) - .withPrincipal("pajqgxysm") - .withRoleName("mbqfqvmk") - .withCrnPattern("oz"))); - model = BinaryData.fromObject(model).toObject(AccessListRoleBindingsSuccessResponseInner.class); - Assertions.assertEquals("bzdopcj", model.kind()); - Assertions.assertEquals("hdldwmgxcxrsl", model.metadata().first()); - Assertions.assertEquals("utwu", model.metadata().last()); - Assertions.assertEquals("grpkhjwniyqs", model.metadata().prev()); - Assertions.assertEquals("i", model.metadata().next()); - Assertions.assertEquals(1317619791, model.metadata().totalSize()); - Assertions.assertEquals("zzlvmbmpaxmodfv", model.data().get(0).kind()); - Assertions.assertEquals("fy", model.data().get(0).id()); - Assertions.assertEquals("pfvmwyhrfou", model.data().get(0).metadata().self()); - Assertions.assertEquals("taakc", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("iyzvqtmnub", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("kpzksmondjmq", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("vypomgkopkwho", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("pajqgxysm", model.data().get(0).principal()); - Assertions.assertEquals("mbqfqvmk", model.data().get(0).roleName()); - Assertions.assertEquals("oz", model.data().get(0).crnPattern()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsWithResponseMockTests.java deleted file mode 100644 index f4f015afb6bd..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListRoleBindingsWithResponseMockTests.java +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessListRoleBindingsSuccessResponse; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessListRoleBindingsWithResponseMockTests { - @Test - public void testListRoleBindingsWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"qylkmqpzoyhlf\",\"metadata\":{\"first\":\"wgcloxoebqinji\",\"last\":\"wjfuj\",\"prev\":\"afcba\",\"next\":\"pzpofoiyjw\",\"total_size\":1217605698},\"data\":[{\"kind\":\"kkholvdndvia\",\"id\":\"gphuartvtiu\",\"metadata\":{\"self\":\"fchnmnah\",\"resource_name\":\"xhk\",\"created_at\":\"qirwrweoox\",\"updated_at\":\"i\",\"deleted_at\":\"xwrsnew\"},\"principal\":\"zqvbubqm\",\"role_name\":\"hsycxhxzgaz\",\"crn_pattern\":\"abo\"},{\"kind\":\"vmfqhppubo\",\"id\":\"epdfgkmtdherng\",\"metadata\":{\"self\":\"juahokqto\",\"resource_name\":\"auxofshfph\",\"created_at\":\"nulaiywzejywhsl\",\"updated_at\":\"ojpllndnpdwrpqaf\",\"deleted_at\":\"ug\"},\"principal\":\"n\",\"role_name\":\"yetefyp\",\"crn_pattern\":\"octfjgtixrjvzuyt\"},{\"kind\":\"mlmuowol\",\"id\":\"uir\",\"metadata\":{\"self\":\"ons\",\"resource_name\":\"nw\",\"created_at\":\"gajinnixjawrtmj\",\"updated_at\":\"myccx\",\"deleted_at\":\"hcoxov\"},\"principal\":\"khenlus\",\"role_name\":\"rd\",\"crn_pattern\":\"xtxrdcqtjvidt\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AccessListRoleBindingsSuccessResponse response = manager.access() - .listRoleBindingsWithResponse("vgfab", "iyji", - new ListAccessRequestModel() - .withSearchFilters(mapOf("neiknpg", "phdu", "btozipqwje", "xgjiuqh", "xxgewpk", "mur")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("qylkmqpzoyhlf", response.kind()); - Assertions.assertEquals("wgcloxoebqinji", response.metadata().first()); - Assertions.assertEquals("wjfuj", response.metadata().last()); - Assertions.assertEquals("afcba", response.metadata().prev()); - Assertions.assertEquals("pzpofoiyjw", response.metadata().next()); - Assertions.assertEquals(1217605698, response.metadata().totalSize()); - Assertions.assertEquals("kkholvdndvia", response.data().get(0).kind()); - Assertions.assertEquals("gphuartvtiu", response.data().get(0).id()); - Assertions.assertEquals("fchnmnah", response.data().get(0).metadata().self()); - Assertions.assertEquals("xhk", response.data().get(0).metadata().resourceName()); - Assertions.assertEquals("qirwrweoox", response.data().get(0).metadata().createdAt()); - Assertions.assertEquals("i", response.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("xwrsnew", 
response.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("zqvbubqm", response.data().get(0).principal()); - Assertions.assertEquals("hsycxhxzgaz", response.data().get(0).roleName()); - Assertions.assertEquals("abo", response.data().get(0).crnPattern()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsSuccessResponseInnerTests.java deleted file mode 100644 index 3cc88fe67a27..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsSuccessResponseInnerTests.java +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.AccessListServiceAccountsSuccessResponseInner; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import com.azure.resourcemanager.confluent.models.ServiceAccountRecord; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class AccessListServiceAccountsSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessListServiceAccountsSuccessResponseInner model = BinaryData.fromString( - "{\"kind\":\"cbonqvpk\",\"metadata\":{\"first\":\"xnj\",\"last\":\"seiphe\",\"prev\":\"lokeyy\",\"next\":\"nj\",\"total_size\":147839675},\"data\":[{\"kind\":\"rhpdjpjumas\",\"id\":\"zj\",\"metadata\":{\"self\":\"e\",\"resource_name\":\"alhbx\",\"created_at\":\"e\",\"updated_at\":\"zzvdudgwds\",\"deleted_at\":\"hotwmcynpwlbjnp\"},\"display_name\":\"cftadeh\",\"description\":\"ltyfsop\"},{\"kind\":\"suesnzw\",\"id\":\"jbavorxzdm\",\"metadata\":{\"self\":\"tbqvudw\",\"resource_name\":\"ndnvo\",\"created_at\":\"ujjugwdkcglh\",\"updated_at\":\"azjdyggd\",\"deleted_at\":\"ixhbkuofqweykhm\"},\"display_name\":\"evfyexfwhybcib\",\"description\":\"vdcsitynn\"},{\"kind\":\"mdectehfiqscjey\",\"id\":\"hezrkgq\",\"metadata\":{\"self\":\"refovgmkqsleyyvx\",\"resource_name\":\"jpkcattpng\",\"created_at\":\"rcczsqpjhvmd\",\"updated_at\":\"v\",\"deleted_at\":\"sounqecanoaeu\"},\"display_name\":\"hy\",\"description\":\"trpmo\"},{\"kind\":\"mcmatuokthfuiu\",\"id\":\"dsfcpkvxodpuoz\",\"metadata\":{\"self\":\"ydagfuaxbe\",\"resource_name\":\"iu\",\"created_at\":\"ktwh\",\"updated_at\":\"xw\",\"deleted_at\":\"wqsmbsur\"},\"display_name\":\"imoryocfsfksym\",\"description\":\"ys\"}]}") - .toObject(AccessListServiceAccountsSuccessResponseInner.class); - Assertions.assertEquals("cbonqvpk", model.kind()); - Assertions.assertEquals("xnj", model.metadata().first()); - Assertions.assertEquals("seiphe", model.metadata().last()); - Assertions.assertEquals("lokeyy", model.metadata().prev()); - 
Assertions.assertEquals("nj", model.metadata().next()); - Assertions.assertEquals(147839675, model.metadata().totalSize()); - Assertions.assertEquals("rhpdjpjumas", model.data().get(0).kind()); - Assertions.assertEquals("zj", model.data().get(0).id()); - Assertions.assertEquals("e", model.data().get(0).metadata().self()); - Assertions.assertEquals("alhbx", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("e", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("zzvdudgwds", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("hotwmcynpwlbjnp", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("cftadeh", model.data().get(0).displayName()); - Assertions.assertEquals("ltyfsop", model.data().get(0).description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessListServiceAccountsSuccessResponseInner model - = new AccessListServiceAccountsSuccessResponseInner().withKind("cbonqvpk") - .withMetadata(new ConfluentListMetadata().withFirst("xnj") - .withLast("seiphe") - .withPrev("lokeyy") - .withNext("nj") - .withTotalSize(147839675)) - .withData(Arrays.asList( - new ServiceAccountRecord().withKind("rhpdjpjumas") - .withId("zj") - .withMetadata(new MetadataEntity().withSelf("e") - .withResourceName("alhbx") - .withCreatedAt("e") - .withUpdatedAt("zzvdudgwds") - .withDeletedAt("hotwmcynpwlbjnp")) - .withDisplayName("cftadeh") - .withDescription("ltyfsop"), - new ServiceAccountRecord().withKind("suesnzw") - .withId("jbavorxzdm") - .withMetadata(new MetadataEntity().withSelf("tbqvudw") - .withResourceName("ndnvo") - .withCreatedAt("ujjugwdkcglh") - .withUpdatedAt("azjdyggd") - .withDeletedAt("ixhbkuofqweykhm")) - .withDisplayName("evfyexfwhybcib") - .withDescription("vdcsitynn"), - new ServiceAccountRecord().withKind("mdectehfiqscjey") - .withId("hezrkgq") - .withMetadata(new MetadataEntity().withSelf("refovgmkqsleyyvx") - .withResourceName("jpkcattpng") - .withCreatedAt("rcczsqpjhvmd") - .withUpdatedAt("v") - .withDeletedAt("sounqecanoaeu")) - .withDisplayName("hy") - .withDescription("trpmo"), - new ServiceAccountRecord().withKind("mcmatuokthfuiu") - .withId("dsfcpkvxodpuoz") - .withMetadata(new MetadataEntity().withSelf("ydagfuaxbe") - .withResourceName("iu") - .withCreatedAt("ktwh") - .withUpdatedAt("xw") - .withDeletedAt("wqsmbsur")) - .withDisplayName("imoryocfsfksym") - .withDescription("ys"))); - model = BinaryData.fromObject(model).toObject(AccessListServiceAccountsSuccessResponseInner.class); - Assertions.assertEquals("cbonqvpk", model.kind()); - Assertions.assertEquals("xnj", model.metadata().first()); - Assertions.assertEquals("seiphe", model.metadata().last()); - Assertions.assertEquals("lokeyy", model.metadata().prev()); - Assertions.assertEquals("nj", model.metadata().next()); - Assertions.assertEquals(147839675, model.metadata().totalSize()); - Assertions.assertEquals("rhpdjpjumas", model.data().get(0).kind()); - Assertions.assertEquals("zj", model.data().get(0).id()); - Assertions.assertEquals("e", model.data().get(0).metadata().self()); - Assertions.assertEquals("alhbx", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("e", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("zzvdudgwds", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("hotwmcynpwlbjnp", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("cftadeh", model.data().get(0).displayName()); - 
Assertions.assertEquals("ltyfsop", model.data().get(0).description()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsWithResponseMockTests.java deleted file mode 100644 index e7e2cc29a758..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListServiceAccountsWithResponseMockTests.java +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessListServiceAccountsSuccessResponse; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessListServiceAccountsWithResponseMockTests { - @Test - public void testListServiceAccountsWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"rlpyznuciqdsmexi\",\"metadata\":{\"first\":\"fuxtyasiibmiybnn\",\"last\":\"tgnljhnmgixhcmav\",\"prev\":\"foudor\",\"next\":\"gyyprotwy\",\"total_size\":677853893},\"data\":[{\"kind\":\"xhugcm\",\"id\":\"avlg\",\"metadata\":{\"self\":\"mftpmdtz\",\"resource_name\":\"ltfvnz\",\"created_at\":\"jtotpvopvpbd\",\"updated_at\":\"qgqqihedsvqwthmk\",\"deleted_at\":\"bcysih\"},\"display_name\":\"qcwdhoh\",\"description\":\"tmcdzsufcohd\"},{\"kind\":\"zlmcmuapcvhdb\",\"id\":\"wqqxeysko\",\"metadata\":{\"self\":\"inkfkbgbz\",\"resource_name\":\"wxeqocljmygvkzqk\",\"created_at\":\"eokbze\",\"updated_at\":\"zrxcczurt\",\"deleted_at\":\"ipqxbkwvzgnzv\"},\"display_name\":\"bzdixzmq\",\"description\":\"odawopqhewjptmcg\"},{\"kind\":\"ostzelndlatu\",\"id\":\"zlbiojlvfhrbbpn\",\"metadata\":{\"self\":\"cwwyyur\",\"resource_name\":\"chpp\",\"created_at\":\"rsnm\",\"updated_at\":\"ayzejnhlbkpbz\",\"deleted_at\":\"piljhahzvech\"},\"display_name\":\"bnwieholew\",\"description\":\"iuubwefqsf\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AccessListServiceAccountsSuccessResponse response - = manager.access() - .listServiceAccountsWithResponse("nw", "acevehjkuyx", - new ListAccessRequestModel().withSearchFilters( - mapOf("faey", "gaoql", "hriypoqeyhlqhy", "inmfgvxirp")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("rlpyznuciqdsmexi", response.kind()); - 
Assertions.assertEquals("fuxtyasiibmiybnn", response.metadata().first()); - Assertions.assertEquals("tgnljhnmgixhcmav", response.metadata().last()); - Assertions.assertEquals("foudor", response.metadata().prev()); - Assertions.assertEquals("gyyprotwy", response.metadata().next()); - Assertions.assertEquals(677853893, response.metadata().totalSize()); - Assertions.assertEquals("xhugcm", response.data().get(0).kind()); - Assertions.assertEquals("avlg", response.data().get(0).id()); - Assertions.assertEquals("mftpmdtz", response.data().get(0).metadata().self()); - Assertions.assertEquals("ltfvnz", response.data().get(0).metadata().resourceName()); - Assertions.assertEquals("jtotpvopvpbd", response.data().get(0).metadata().createdAt()); - Assertions.assertEquals("qgqqihedsvqwthmk", response.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("bcysih", response.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("qcwdhoh", response.data().get(0).displayName()); - Assertions.assertEquals("tmcdzsufcohd", response.data().get(0).description()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListUsersSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListUsersSuccessResponseInnerTests.java deleted file mode 100644 index 01c9b895d3c1..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListUsersSuccessResponseInnerTests.java +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.AccessListUsersSuccessResponseInner; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import com.azure.resourcemanager.confluent.models.UserRecord; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class AccessListUsersSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessListUsersSuccessResponseInner model = BinaryData.fromString( - "{\"kind\":\"rjb\",\"metadata\":{\"first\":\"rcjxvsnbyxqabn\",\"last\":\"cpc\",\"prev\":\"hurzafblj\",\"next\":\"pbtoqcjmkl\",\"total_size\":1217713745},\"data\":[{\"kind\":\"dtqajzyulpkudj\",\"id\":\"lkhbz\",\"metadata\":{\"self\":\"pgzgq\",\"resource_name\":\"zloc\",\"created_at\":\"c\",\"updated_at\":\"ierhhbcsglummaj\",\"deleted_at\":\"aodxo\"},\"email\":\"bdxkqpxokaj\",\"full_name\":\"npime\",\"auth_type\":\"stxgc\"},{\"kind\":\"dg\",\"id\":\"ajrmvdjwzrlovmc\",\"metadata\":{\"self\":\"ijcoejctb\",\"resource_name\":\"qsqsy\",\"created_at\":\"kbfkg\",\"updated_at\":\"dkexxppofm\",\"deleted_at\":\"x\"},\"email\":\"jpgd\",\"full_name\":\"ocjjxhvpmouexh\",\"auth_type\":\"xibqeojnx\"},{\"kind\":\"zvddntwndeicbtwn\",\"id\":\"aoqvuh\",\"metadata\":{\"self\":\"f\",\"resource_name\":\"yd\",\"created_at\":\"lmjthjq\",\"updated_at\":\"pyeicxm\",\"deleted_at\":\"iwqvhkh\"},\"email\":\"uigdtopbobjog\",\"full_name\":\"e\",\"auth_type\":\"a\"}]}") - .toObject(AccessListUsersSuccessResponseInner.class); - Assertions.assertEquals("rjb", model.kind()); - Assertions.assertEquals("rcjxvsnbyxqabn", model.metadata().first()); - Assertions.assertEquals("cpc", model.metadata().last()); - Assertions.assertEquals("hurzafblj", model.metadata().prev()); - Assertions.assertEquals("pbtoqcjmkl", model.metadata().next()); - Assertions.assertEquals(1217713745, model.metadata().totalSize()); - Assertions.assertEquals("dtqajzyulpkudj", model.data().get(0).kind()); - Assertions.assertEquals("lkhbz", model.data().get(0).id()); - Assertions.assertEquals("pgzgq", model.data().get(0).metadata().self()); - Assertions.assertEquals("zloc", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("c", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("ierhhbcsglummaj", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("aodxo", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("bdxkqpxokaj", model.data().get(0).email()); - Assertions.assertEquals("npime", model.data().get(0).fullName()); - Assertions.assertEquals("stxgc", model.data().get(0).authType()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessListUsersSuccessResponseInner model = new AccessListUsersSuccessResponseInner().withKind("rjb") - .withMetadata(new ConfluentListMetadata().withFirst("rcjxvsnbyxqabn") - .withLast("cpc") - .withPrev("hurzafblj") - .withNext("pbtoqcjmkl") - .withTotalSize(1217713745)) - .withData(Arrays.asList( - new UserRecord().withKind("dtqajzyulpkudj") - .withId("lkhbz") - .withMetadata(new MetadataEntity().withSelf("pgzgq") - .withResourceName("zloc") - .withCreatedAt("c") - .withUpdatedAt("ierhhbcsglummaj") - .withDeletedAt("aodxo")) - .withEmail("bdxkqpxokaj") - .withFullName("npime") - .withAuthType("stxgc"), - new UserRecord().withKind("dg") - 
.withId("ajrmvdjwzrlovmc") - .withMetadata(new MetadataEntity().withSelf("ijcoejctb") - .withResourceName("qsqsy") - .withCreatedAt("kbfkg") - .withUpdatedAt("dkexxppofm") - .withDeletedAt("x")) - .withEmail("jpgd") - .withFullName("ocjjxhvpmouexh") - .withAuthType("xibqeojnx"), - new UserRecord().withKind("zvddntwndeicbtwn") - .withId("aoqvuh") - .withMetadata(new MetadataEntity().withSelf("f") - .withResourceName("yd") - .withCreatedAt("lmjthjq") - .withUpdatedAt("pyeicxm") - .withDeletedAt("iwqvhkh")) - .withEmail("uigdtopbobjog") - .withFullName("e") - .withAuthType("a"))); - model = BinaryData.fromObject(model).toObject(AccessListUsersSuccessResponseInner.class); - Assertions.assertEquals("rjb", model.kind()); - Assertions.assertEquals("rcjxvsnbyxqabn", model.metadata().first()); - Assertions.assertEquals("cpc", model.metadata().last()); - Assertions.assertEquals("hurzafblj", model.metadata().prev()); - Assertions.assertEquals("pbtoqcjmkl", model.metadata().next()); - Assertions.assertEquals(1217713745, model.metadata().totalSize()); - Assertions.assertEquals("dtqajzyulpkudj", model.data().get(0).kind()); - Assertions.assertEquals("lkhbz", model.data().get(0).id()); - Assertions.assertEquals("pgzgq", model.data().get(0).metadata().self()); - Assertions.assertEquals("zloc", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("c", model.data().get(0).metadata().createdAt()); - Assertions.assertEquals("ierhhbcsglummaj", model.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("aodxo", model.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("bdxkqpxokaj", model.data().get(0).email()); - Assertions.assertEquals("npime", model.data().get(0).fullName()); - Assertions.assertEquals("stxgc", model.data().get(0).authType()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListUsersWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListUsersWithResponseMockTests.java deleted file mode 100644 index a5d0112f2337..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessListUsersWithResponseMockTests.java +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.AccessListUsersSuccessResponse; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class AccessListUsersWithResponseMockTests { - @Test - public void testListUsersWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"jtlo\",\"metadata\":{\"first\":\"uojrngiflr\",\"last\":\"asccbiui\",\"prev\":\"dlyjdf\",\"next\":\"mkyoqufdvruzsl\",\"total_size\":15628056},\"data\":[{\"kind\":\"tfnmdx\",\"id\":\"ngfdgugeyzihgrky\",\"metadata\":{\"self\":\"absnmfpp\",\"resource_name\":\"jee\",\"created_at\":\"hyhsgzfczbg\",\"updated_at\":\"fgbegl\",\"deleted_at\":\"leohibetnluankr\"},\"email\":\"xeeebtijvacvbmqz\",\"full_name\":\"q\",\"auth_type\":\"aj\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - AccessListUsersSuccessResponse response = manager.access() - .listUsersWithResponse("qbnj", "rcgegydcwboxjum", - new ListAccessRequestModel().withSearchFilters(mapOf("ouau", "olihrra")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("jtlo", response.kind()); - Assertions.assertEquals("uojrngiflr", response.metadata().first()); - Assertions.assertEquals("asccbiui", response.metadata().last()); - Assertions.assertEquals("dlyjdf", response.metadata().prev()); - Assertions.assertEquals("mkyoqufdvruzsl", response.metadata().next()); - Assertions.assertEquals(15628056, response.metadata().totalSize()); - Assertions.assertEquals("tfnmdx", response.data().get(0).kind()); - Assertions.assertEquals("ngfdgugeyzihgrky", response.data().get(0).id()); - Assertions.assertEquals("absnmfpp", response.data().get(0).metadata().self()); - Assertions.assertEquals("jee", response.data().get(0).metadata().resourceName()); - Assertions.assertEquals("hyhsgzfczbg", response.data().get(0).metadata().createdAt()); - Assertions.assertEquals("fgbegl", response.data().get(0).metadata().updatedAt()); - Assertions.assertEquals("leohibetnluankr", response.data().get(0).metadata().deletedAt()); - Assertions.assertEquals("xeeebtijvacvbmqz", response.data().get(0).email()); - Assertions.assertEquals("q", response.data().get(0).fullName()); - Assertions.assertEquals("aj", response.data().get(0).authType()); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... 
inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessRoleBindingNameListSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessRoleBindingNameListSuccessResponseInnerTests.java deleted file mode 100644 index acc5899b5e63..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/AccessRoleBindingNameListSuccessResponseInnerTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.AccessRoleBindingNameListSuccessResponseInner; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class AccessRoleBindingNameListSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - AccessRoleBindingNameListSuccessResponseInner model = BinaryData.fromString( - "{\"kind\":\"btkuwhh\",\"metadata\":{\"first\":\"k\",\"last\":\"oxafn\",\"prev\":\"lpichk\",\"next\":\"mkcdyhbpkkpwdre\",\"total_size\":1405054510},\"data\":[\"qfovljxywsuws\"]}") - .toObject(AccessRoleBindingNameListSuccessResponseInner.class); - Assertions.assertEquals("btkuwhh", model.kind()); - Assertions.assertEquals("k", model.metadata().first()); - Assertions.assertEquals("oxafn", model.metadata().last()); - Assertions.assertEquals("lpichk", model.metadata().prev()); - Assertions.assertEquals("mkcdyhbpkkpwdre", model.metadata().next()); - Assertions.assertEquals(1405054510, model.metadata().totalSize()); - Assertions.assertEquals("qfovljxywsuws", model.data().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - AccessRoleBindingNameListSuccessResponseInner model - = new AccessRoleBindingNameListSuccessResponseInner().withKind("btkuwhh") - .withMetadata(new ConfluentListMetadata().withFirst("k") - .withLast("oxafn") - .withPrev("lpichk") - .withNext("mkcdyhbpkkpwdre") - .withTotalSize(1405054510)) - .withData(Arrays.asList("qfovljxywsuws")); - model = BinaryData.fromObject(model).toObject(AccessRoleBindingNameListSuccessResponseInner.class); - Assertions.assertEquals("btkuwhh", model.kind()); - Assertions.assertEquals("k", model.metadata().first()); - Assertions.assertEquals("oxafn", model.metadata().last()); - Assertions.assertEquals("lpichk", model.metadata().prev()); - Assertions.assertEquals("mkcdyhbpkkpwdre", model.metadata().next()); - Assertions.assertEquals(1405054510, model.metadata().totalSize()); - Assertions.assertEquals("qfovljxywsuws", model.data().get(0)); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ApiKeyOwnerEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ApiKeyOwnerEntityTests.java deleted file mode 100644 index 572d4b9a0b4d..000000000000 --- 
a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ApiKeyOwnerEntityTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ApiKeyOwnerEntity; -import org.junit.jupiter.api.Assertions; - -public final class ApiKeyOwnerEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ApiKeyOwnerEntity model = BinaryData - .fromString("{\"id\":\"htba\",\"related\":\"gx\",\"resourceName\":\"rc\",\"kind\":\"yklyhpluodpvruud\"}") - .toObject(ApiKeyOwnerEntity.class); - Assertions.assertEquals("htba", model.id()); - Assertions.assertEquals("gx", model.related()); - Assertions.assertEquals("rc", model.resourceName()); - Assertions.assertEquals("yklyhpluodpvruud", model.kind()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ApiKeyOwnerEntity model = new ApiKeyOwnerEntity().withId("htba") - .withRelated("gx") - .withResourceName("rc") - .withKind("yklyhpluodpvruud"); - model = BinaryData.fromObject(model).toObject(ApiKeyOwnerEntity.class); - Assertions.assertEquals("htba", model.id()); - Assertions.assertEquals("gx", model.related()); - Assertions.assertEquals("rc", model.resourceName()); - Assertions.assertEquals("yklyhpluodpvruud", model.kind()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ApiKeyResourceEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ApiKeyResourceEntityTests.java deleted file mode 100644 index 99d97c510ffc..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ApiKeyResourceEntityTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ApiKeyResourceEntity; -import org.junit.jupiter.api.Assertions; - -public final class ApiKeyResourceEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ApiKeyResourceEntity model = BinaryData.fromString( - "{\"id\":\"jmsvpkjp\",\"environment\":\"kwcf\",\"related\":\"ljyxgtczhe\",\"resourceName\":\"bsdshmkxmaehvbbx\",\"kind\":\"iplt\"}") - .toObject(ApiKeyResourceEntity.class); - Assertions.assertEquals("jmsvpkjp", model.id()); - Assertions.assertEquals("kwcf", model.environment()); - Assertions.assertEquals("ljyxgtczhe", model.related()); - Assertions.assertEquals("bsdshmkxmaehvbbx", model.resourceName()); - Assertions.assertEquals("iplt", model.kind()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ApiKeyResourceEntity model = new ApiKeyResourceEntity().withId("jmsvpkjp") - .withEnvironment("kwcf") - .withRelated("ljyxgtczhe") - .withResourceName("bsdshmkxmaehvbbx") - .withKind("iplt"); - model = BinaryData.fromObject(model).toObject(ApiKeyResourceEntity.class); - Assertions.assertEquals("jmsvpkjp", model.id()); - Assertions.assertEquals("kwcf", model.environment()); - Assertions.assertEquals("ljyxgtczhe", model.related()); - Assertions.assertEquals("bsdshmkxmaehvbbx", model.resourceName()); - Assertions.assertEquals("iplt", model.kind()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterByokEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterByokEntityTests.java deleted file mode 100644 index 5582bcf74e15..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterByokEntityTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterByokEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterByokEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterByokEntity model - = BinaryData.fromString("{\"id\":\"afcnih\",\"related\":\"qapnedgfbcv\",\"resource_name\":\"vq\"}") - .toObject(ClusterByokEntity.class); - Assertions.assertEquals("afcnih", model.id()); - Assertions.assertEquals("qapnedgfbcv", model.related()); - Assertions.assertEquals("vq", model.resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterByokEntity model - = new ClusterByokEntity().withId("afcnih").withRelated("qapnedgfbcv").withResourceName("vq"); - model = BinaryData.fromObject(model).toObject(ClusterByokEntity.class); - Assertions.assertEquals("afcnih", model.id()); - Assertions.assertEquals("qapnedgfbcv", model.related()); - Assertions.assertEquals("vq", model.resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterConfigEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterConfigEntityTests.java deleted file mode 100644 index 7bc68815808a..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterConfigEntityTests.java +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterConfigEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterConfigEntity model - = BinaryData.fromString("{\"kind\":\"hdneuelfph\"}").toObject(ClusterConfigEntity.class); - Assertions.assertEquals("hdneuelfph", model.kind()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterConfigEntity model = new ClusterConfigEntity().withKind("hdneuelfph"); - model = BinaryData.fromObject(model).toObject(ClusterConfigEntity.class); - Assertions.assertEquals("hdneuelfph", model.kind()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterEnvironmentEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterEnvironmentEntityTests.java deleted file mode 100644 index 1b6d74c879fe..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterEnvironmentEntityTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterEnvironmentEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterEnvironmentEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterEnvironmentEntity model = BinaryData.fromString( - "{\"id\":\"yhtozfikdowwqu\",\"environment\":\"xzxcl\",\"related\":\"thhqzonosggbh\",\"resource_name\":\"hfwdsjnkaljutiis\"}") - .toObject(ClusterEnvironmentEntity.class); - Assertions.assertEquals("yhtozfikdowwqu", model.id()); - Assertions.assertEquals("xzxcl", model.environment()); - Assertions.assertEquals("thhqzonosggbh", model.related()); - Assertions.assertEquals("hfwdsjnkaljutiis", model.resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterEnvironmentEntity model = new ClusterEnvironmentEntity().withId("yhtozfikdowwqu") - .withEnvironment("xzxcl") - .withRelated("thhqzonosggbh") - .withResourceName("hfwdsjnkaljutiis"); - model = BinaryData.fromObject(model).toObject(ClusterEnvironmentEntity.class); - Assertions.assertEquals("yhtozfikdowwqu", model.id()); - Assertions.assertEquals("xzxcl", model.environment()); - Assertions.assertEquals("thhqzonosggbh", model.related()); - Assertions.assertEquals("hfwdsjnkaljutiis", model.resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterNetworkEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterNetworkEntityTests.java deleted file mode 100644 index 70e3cde970a2..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterNetworkEntityTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterNetworkEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterNetworkEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterNetworkEntity model = BinaryData.fromString( - "{\"id\":\"cffgdkzzewk\",\"environment\":\"hqcrailvpnpp\",\"related\":\"flrwd\",\"resource_name\":\"dlxyjrxs\"}") - .toObject(ClusterNetworkEntity.class); - Assertions.assertEquals("cffgdkzzewk", model.id()); - Assertions.assertEquals("hqcrailvpnpp", model.environment()); - Assertions.assertEquals("flrwd", model.related()); - Assertions.assertEquals("dlxyjrxs", model.resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterNetworkEntity model = new ClusterNetworkEntity().withId("cffgdkzzewk") - .withEnvironment("hqcrailvpnpp") - .withRelated("flrwd") - .withResourceName("dlxyjrxs"); - model = BinaryData.fromObject(model).toObject(ClusterNetworkEntity.class); - Assertions.assertEquals("cffgdkzzewk", model.id()); - Assertions.assertEquals("hqcrailvpnpp", model.environment()); - Assertions.assertEquals("flrwd", model.related()); - Assertions.assertEquals("dlxyjrxs", model.resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterPropertiesTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterPropertiesTests.java deleted file mode 100644 index 2eafafcad139..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterPropertiesTests.java +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.ClusterProperties; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import com.azure.resourcemanager.confluent.models.ClusterStatusEntity; -import com.azure.resourcemanager.confluent.models.SCClusterByokEntity; -import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity; -import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterProperties model = BinaryData.fromString( - "{\"metadata\":{\"self\":\"vxccedcp\",\"resourceName\":\"dyodnwzxltj\",\"createdTimestamp\":\"nhltiugcxn\",\"updatedTimestamp\":\"vwxqibyqunyo\",\"deletedTimestamp\":\"wlmdjrkv\"},\"spec\":{\"name\":\"vfvpdbodaciz\",\"availability\":\"q\",\"cloud\":\"krribdeibqi\",\"zone\":\"kghv\",\"region\":\"dzwmkrefajpj\",\"kafkaBootstrapEndpoint\":\"wkqnyhg\",\"httpEndpoint\":\"j\",\"apiEndpoint\":\"ivfxzsjabibsyst\",\"config\":{\"kind\":\"sdjpvkvp\"},\"environment\":{\"id\":\"bkzbzkd\",\"environment\":\"cjabudurgkakmo\",\"related\":\"hjjklff\",\"resourceName\":\"ouw\"},\"network\":{\"id\":\"zrfze\",\"environment\":\"ebizikayuh\",\"related\":\"bjbsybb\",\"resourceName\":\"r\"},\"byok\":{\"id\":\"dgmfpgvmpipasl\",\"related\":\"aqfxss\",\"resourceName\":\"u\"}},\"status\":{\"phase\":\"dsrezpdrhneuyow\",\"cku\":150720980}}") - .toObject(ClusterProperties.class); - Assertions.assertEquals("vxccedcp", model.metadata().self()); - Assertions.assertEquals("dyodnwzxltj", model.metadata().resourceName()); - Assertions.assertEquals("nhltiugcxn", model.metadata().createdTimestamp()); - Assertions.assertEquals("vwxqibyqunyo", model.metadata().updatedTimestamp()); - Assertions.assertEquals("wlmdjrkv", model.metadata().deletedTimestamp()); - Assertions.assertEquals("vfvpdbodaciz", model.spec().name()); - Assertions.assertEquals("q", model.spec().availability()); - Assertions.assertEquals("krribdeibqi", model.spec().cloud()); - Assertions.assertEquals("kghv", model.spec().zone()); - Assertions.assertEquals("dzwmkrefajpj", model.spec().region()); - Assertions.assertEquals("wkqnyhg", model.spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("j", model.spec().httpEndpoint()); - Assertions.assertEquals("ivfxzsjabibsyst", model.spec().apiEndpoint()); - Assertions.assertEquals("sdjpvkvp", model.spec().config().kind()); - Assertions.assertEquals("bkzbzkd", model.spec().environment().id()); - Assertions.assertEquals("cjabudurgkakmo", model.spec().environment().environment()); - Assertions.assertEquals("hjjklff", model.spec().environment().related()); - Assertions.assertEquals("ouw", model.spec().environment().resourceName()); - Assertions.assertEquals("zrfze", model.spec().network().id()); - Assertions.assertEquals("ebizikayuh", model.spec().network().environment()); - Assertions.assertEquals("bjbsybb", model.spec().network().related()); - Assertions.assertEquals("r", model.spec().network().resourceName()); - Assertions.assertEquals("dgmfpgvmpipasl", model.spec().byok().id()); - Assertions.assertEquals("aqfxss", model.spec().byok().related()); - Assertions.assertEquals("u", model.spec().byok().resourceName()); - Assertions.assertEquals("dsrezpdrhneuyow", model.status().phase()); - 
Assertions.assertEquals(150720980, model.status().cku()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterProperties model = new ClusterProperties() - .withMetadata(new SCMetadataEntity().withSelf("vxccedcp") - .withResourceName("dyodnwzxltj") - .withCreatedTimestamp("nhltiugcxn") - .withUpdatedTimestamp("vwxqibyqunyo") - .withDeletedTimestamp("wlmdjrkv")) - .withSpec(new SCClusterSpecEntity().withName("vfvpdbodaciz") - .withAvailability("q") - .withCloud("krribdeibqi") - .withZone("kghv") - .withRegion("dzwmkrefajpj") - .withKafkaBootstrapEndpoint("wkqnyhg") - .withHttpEndpoint("j") - .withApiEndpoint("ivfxzsjabibsyst") - .withConfig(new ClusterConfigEntity().withKind("sdjpvkvp")) - .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("bkzbzkd") - .withEnvironment("cjabudurgkakmo") - .withRelated("hjjklff") - .withResourceName("ouw")) - .withNetwork(new SCClusterNetworkEnvironmentEntity().withId("zrfze") - .withEnvironment("ebizikayuh") - .withRelated("bjbsybb") - .withResourceName("r")) - .withByok( - new SCClusterByokEntity().withId("dgmfpgvmpipasl").withRelated("aqfxss").withResourceName("u"))) - .withStatus(new ClusterStatusEntity().withPhase("dsrezpdrhneuyow").withCku(150720980)); - model = BinaryData.fromObject(model).toObject(ClusterProperties.class); - Assertions.assertEquals("vxccedcp", model.metadata().self()); - Assertions.assertEquals("dyodnwzxltj", model.metadata().resourceName()); - Assertions.assertEquals("nhltiugcxn", model.metadata().createdTimestamp()); - Assertions.assertEquals("vwxqibyqunyo", model.metadata().updatedTimestamp()); - Assertions.assertEquals("wlmdjrkv", model.metadata().deletedTimestamp()); - Assertions.assertEquals("vfvpdbodaciz", model.spec().name()); - Assertions.assertEquals("q", model.spec().availability()); - Assertions.assertEquals("krribdeibqi", model.spec().cloud()); - Assertions.assertEquals("kghv", model.spec().zone()); - Assertions.assertEquals("dzwmkrefajpj", model.spec().region()); - Assertions.assertEquals("wkqnyhg", model.spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("j", model.spec().httpEndpoint()); - Assertions.assertEquals("ivfxzsjabibsyst", model.spec().apiEndpoint()); - Assertions.assertEquals("sdjpvkvp", model.spec().config().kind()); - Assertions.assertEquals("bkzbzkd", model.spec().environment().id()); - Assertions.assertEquals("cjabudurgkakmo", model.spec().environment().environment()); - Assertions.assertEquals("hjjklff", model.spec().environment().related()); - Assertions.assertEquals("ouw", model.spec().environment().resourceName()); - Assertions.assertEquals("zrfze", model.spec().network().id()); - Assertions.assertEquals("ebizikayuh", model.spec().network().environment()); - Assertions.assertEquals("bjbsybb", model.spec().network().related()); - Assertions.assertEquals("r", model.spec().network().resourceName()); - Assertions.assertEquals("dgmfpgvmpipasl", model.spec().byok().id()); - Assertions.assertEquals("aqfxss", model.spec().byok().related()); - Assertions.assertEquals("u", model.spec().byok().resourceName()); - Assertions.assertEquals("dsrezpdrhneuyow", model.status().phase()); - Assertions.assertEquals(150720980, model.status().cku()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterRecordTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterRecordTests.java deleted file mode 100644 index 
69551dc84251..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterRecordTests.java +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterByokEntity; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import com.azure.resourcemanager.confluent.models.ClusterEnvironmentEntity; -import com.azure.resourcemanager.confluent.models.ClusterNetworkEntity; -import com.azure.resourcemanager.confluent.models.ClusterRecord; -import com.azure.resourcemanager.confluent.models.ClusterSpecEntity; -import com.azure.resourcemanager.confluent.models.ClusterStatusEntity; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterRecordTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterRecord model = BinaryData.fromString( - "{\"kind\":\"qaqtdoqmcbxvwvxy\",\"id\":\"qbhsfxobl\",\"metadata\":{\"self\":\"blmpewww\",\"resource_name\":\"krvrns\",\"created_at\":\"hqjohxcrsbfova\",\"updated_at\":\"ruvw\",\"deleted_at\":\"sqfsubcgjbirxb\"},\"display_name\":\"bsrfbj\",\"spec\":{\"display_name\":\"w\",\"availability\":\"otftpvjzbexilz\",\"cloud\":\"fqqnvwpmqtaruo\",\"zone\":\"mkcjhwqytjrybn\",\"region\":\"ewgdrjervn\",\"kafka_bootstrap_endpoint\":\"nqpeh\",\"http_endpoint\":\"doy\",\"api_endpoint\":\"ifthnz\",\"config\":{\"kind\":\"sl\"},\"environment\":{\"id\":\"yq\",\"environment\":\"ynduha\",\"related\":\"qlkth\",\"resource_name\":\"aqolbgycduiertg\"},\"network\":{\"id\":\"mvaolps\",\"environment\":\"qlfmmdnbb\",\"related\":\"zpswiydmc\",\"resource_name\":\"hzdxssadbzm\"},\"byok\":{\"id\":\"fznudaodvxzb\",\"related\":\"blylpstdbh\",\"resource_name\":\"srzdzucerscdn\"}},\"status\":{\"phase\":\"vfiwjmygtdss\",\"cku\":1943196565}}") - .toObject(ClusterRecord.class); - Assertions.assertEquals("qaqtdoqmcbxvwvxy", model.kind()); - Assertions.assertEquals("qbhsfxobl", model.id()); - Assertions.assertEquals("blmpewww", model.metadata().self()); - Assertions.assertEquals("krvrns", model.metadata().resourceName()); - Assertions.assertEquals("hqjohxcrsbfova", model.metadata().createdAt()); - Assertions.assertEquals("ruvw", model.metadata().updatedAt()); - Assertions.assertEquals("sqfsubcgjbirxb", model.metadata().deletedAt()); - Assertions.assertEquals("bsrfbj", model.displayName()); - Assertions.assertEquals("w", model.spec().displayName()); - Assertions.assertEquals("otftpvjzbexilz", model.spec().availability()); - Assertions.assertEquals("fqqnvwpmqtaruo", model.spec().cloud()); - Assertions.assertEquals("mkcjhwqytjrybn", model.spec().zone()); - Assertions.assertEquals("ewgdrjervn", model.spec().region()); - Assertions.assertEquals("nqpeh", model.spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("doy", model.spec().httpEndpoint()); - Assertions.assertEquals("ifthnz", model.spec().apiEndpoint()); - Assertions.assertEquals("sl", model.spec().config().kind()); - Assertions.assertEquals("yq", model.spec().environment().id()); - Assertions.assertEquals("ynduha", model.spec().environment().environment()); - Assertions.assertEquals("qlkth", model.spec().environment().related()); - 
Assertions.assertEquals("aqolbgycduiertg", model.spec().environment().resourceName()); - Assertions.assertEquals("mvaolps", model.spec().network().id()); - Assertions.assertEquals("qlfmmdnbb", model.spec().network().environment()); - Assertions.assertEquals("zpswiydmc", model.spec().network().related()); - Assertions.assertEquals("hzdxssadbzm", model.spec().network().resourceName()); - Assertions.assertEquals("fznudaodvxzb", model.spec().byok().id()); - Assertions.assertEquals("blylpstdbh", model.spec().byok().related()); - Assertions.assertEquals("srzdzucerscdn", model.spec().byok().resourceName()); - Assertions.assertEquals("vfiwjmygtdss", model.status().phase()); - Assertions.assertEquals(1943196565, model.status().cku()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterRecord model = new ClusterRecord().withKind("qaqtdoqmcbxvwvxy") - .withId("qbhsfxobl") - .withMetadata(new MetadataEntity().withSelf("blmpewww") - .withResourceName("krvrns") - .withCreatedAt("hqjohxcrsbfova") - .withUpdatedAt("ruvw") - .withDeletedAt("sqfsubcgjbirxb")) - .withDisplayName("bsrfbj") - .withSpec(new ClusterSpecEntity().withDisplayName("w") - .withAvailability("otftpvjzbexilz") - .withCloud("fqqnvwpmqtaruo") - .withZone("mkcjhwqytjrybn") - .withRegion("ewgdrjervn") - .withKafkaBootstrapEndpoint("nqpeh") - .withHttpEndpoint("doy") - .withApiEndpoint("ifthnz") - .withConfig(new ClusterConfigEntity().withKind("sl")) - .withEnvironment(new ClusterEnvironmentEntity().withId("yq") - .withEnvironment("ynduha") - .withRelated("qlkth") - .withResourceName("aqolbgycduiertg")) - .withNetwork(new ClusterNetworkEntity().withId("mvaolps") - .withEnvironment("qlfmmdnbb") - .withRelated("zpswiydmc") - .withResourceName("hzdxssadbzm")) - .withByok(new ClusterByokEntity().withId("fznudaodvxzb") - .withRelated("blylpstdbh") - .withResourceName("srzdzucerscdn"))) - .withStatus(new ClusterStatusEntity().withPhase("vfiwjmygtdss").withCku(1943196565)); - model = BinaryData.fromObject(model).toObject(ClusterRecord.class); - Assertions.assertEquals("qaqtdoqmcbxvwvxy", model.kind()); - Assertions.assertEquals("qbhsfxobl", model.id()); - Assertions.assertEquals("blmpewww", model.metadata().self()); - Assertions.assertEquals("krvrns", model.metadata().resourceName()); - Assertions.assertEquals("hqjohxcrsbfova", model.metadata().createdAt()); - Assertions.assertEquals("ruvw", model.metadata().updatedAt()); - Assertions.assertEquals("sqfsubcgjbirxb", model.metadata().deletedAt()); - Assertions.assertEquals("bsrfbj", model.displayName()); - Assertions.assertEquals("w", model.spec().displayName()); - Assertions.assertEquals("otftpvjzbexilz", model.spec().availability()); - Assertions.assertEquals("fqqnvwpmqtaruo", model.spec().cloud()); - Assertions.assertEquals("mkcjhwqytjrybn", model.spec().zone()); - Assertions.assertEquals("ewgdrjervn", model.spec().region()); - Assertions.assertEquals("nqpeh", model.spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("doy", model.spec().httpEndpoint()); - Assertions.assertEquals("ifthnz", model.spec().apiEndpoint()); - Assertions.assertEquals("sl", model.spec().config().kind()); - Assertions.assertEquals("yq", model.spec().environment().id()); - Assertions.assertEquals("ynduha", model.spec().environment().environment()); - Assertions.assertEquals("qlkth", model.spec().environment().related()); - Assertions.assertEquals("aqolbgycduiertg", model.spec().environment().resourceName()); - Assertions.assertEquals("mvaolps", model.spec().network().id()); 
- Assertions.assertEquals("qlfmmdnbb", model.spec().network().environment()); - Assertions.assertEquals("zpswiydmc", model.spec().network().related()); - Assertions.assertEquals("hzdxssadbzm", model.spec().network().resourceName()); - Assertions.assertEquals("fznudaodvxzb", model.spec().byok().id()); - Assertions.assertEquals("blylpstdbh", model.spec().byok().related()); - Assertions.assertEquals("srzdzucerscdn", model.spec().byok().resourceName()); - Assertions.assertEquals("vfiwjmygtdss", model.status().phase()); - Assertions.assertEquals(1943196565, model.status().cku()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterSpecEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterSpecEntityTests.java deleted file mode 100644 index deba9b55b1da..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterSpecEntityTests.java +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterByokEntity; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import com.azure.resourcemanager.confluent.models.ClusterEnvironmentEntity; -import com.azure.resourcemanager.confluent.models.ClusterNetworkEntity; -import com.azure.resourcemanager.confluent.models.ClusterSpecEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterSpecEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterSpecEntity model = BinaryData.fromString( - "{\"display_name\":\"mweriofzpy\",\"availability\":\"emwabnet\",\"cloud\":\"hszhedplvwiwu\",\"zone\":\"wmbesldnkw\",\"region\":\"pp\",\"kafka_bootstrap_endpoint\":\"lcxog\",\"http_endpoint\":\"konzmnsik\",\"api_endpoint\":\"kqze\",\"config\":{\"kind\":\"dltfz\"},\"environment\":{\"id\":\"hvhgureod\",\"environment\":\"obdagxtibqdxb\",\"related\":\"akbogqxndlkzgxh\",\"resource_name\":\"iplbpodxunkbebxm\"},\"network\":{\"id\":\"yntwlrbq\",\"environment\":\"oievseotgqrlltm\",\"related\":\"lauwzizxbmpgcjef\",\"resource_name\":\"muvp\"},\"byok\":{\"id\":\"d\",\"related\":\"orppxebmnzbtb\",\"resource_name\":\"pglkf\"}}") - .toObject(ClusterSpecEntity.class); - Assertions.assertEquals("mweriofzpy", model.displayName()); - Assertions.assertEquals("emwabnet", model.availability()); - Assertions.assertEquals("hszhedplvwiwu", model.cloud()); - Assertions.assertEquals("wmbesldnkw", model.zone()); - Assertions.assertEquals("pp", model.region()); - Assertions.assertEquals("lcxog", model.kafkaBootstrapEndpoint()); - Assertions.assertEquals("konzmnsik", model.httpEndpoint()); - Assertions.assertEquals("kqze", model.apiEndpoint()); - Assertions.assertEquals("dltfz", model.config().kind()); - Assertions.assertEquals("hvhgureod", model.environment().id()); - Assertions.assertEquals("obdagxtibqdxb", model.environment().environment()); - Assertions.assertEquals("akbogqxndlkzgxh", model.environment().related()); - Assertions.assertEquals("iplbpodxunkbebxm", model.environment().resourceName()); - Assertions.assertEquals("yntwlrbq", model.network().id()); - Assertions.assertEquals("oievseotgqrlltm", 
model.network().environment()); - Assertions.assertEquals("lauwzizxbmpgcjef", model.network().related()); - Assertions.assertEquals("muvp", model.network().resourceName()); - Assertions.assertEquals("d", model.byok().id()); - Assertions.assertEquals("orppxebmnzbtb", model.byok().related()); - Assertions.assertEquals("pglkf", model.byok().resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterSpecEntity model = new ClusterSpecEntity().withDisplayName("mweriofzpy") - .withAvailability("emwabnet") - .withCloud("hszhedplvwiwu") - .withZone("wmbesldnkw") - .withRegion("pp") - .withKafkaBootstrapEndpoint("lcxog") - .withHttpEndpoint("konzmnsik") - .withApiEndpoint("kqze") - .withConfig(new ClusterConfigEntity().withKind("dltfz")) - .withEnvironment(new ClusterEnvironmentEntity().withId("hvhgureod") - .withEnvironment("obdagxtibqdxb") - .withRelated("akbogqxndlkzgxh") - .withResourceName("iplbpodxunkbebxm")) - .withNetwork(new ClusterNetworkEntity().withId("yntwlrbq") - .withEnvironment("oievseotgqrlltm") - .withRelated("lauwzizxbmpgcjef") - .withResourceName("muvp")) - .withByok(new ClusterByokEntity().withId("d").withRelated("orppxebmnzbtb").withResourceName("pglkf")); - model = BinaryData.fromObject(model).toObject(ClusterSpecEntity.class); - Assertions.assertEquals("mweriofzpy", model.displayName()); - Assertions.assertEquals("emwabnet", model.availability()); - Assertions.assertEquals("hszhedplvwiwu", model.cloud()); - Assertions.assertEquals("wmbesldnkw", model.zone()); - Assertions.assertEquals("pp", model.region()); - Assertions.assertEquals("lcxog", model.kafkaBootstrapEndpoint()); - Assertions.assertEquals("konzmnsik", model.httpEndpoint()); - Assertions.assertEquals("kqze", model.apiEndpoint()); - Assertions.assertEquals("dltfz", model.config().kind()); - Assertions.assertEquals("hvhgureod", model.environment().id()); - Assertions.assertEquals("obdagxtibqdxb", model.environment().environment()); - Assertions.assertEquals("akbogqxndlkzgxh", model.environment().related()); - Assertions.assertEquals("iplbpodxunkbebxm", model.environment().resourceName()); - Assertions.assertEquals("yntwlrbq", model.network().id()); - Assertions.assertEquals("oievseotgqrlltm", model.network().environment()); - Assertions.assertEquals("lauwzizxbmpgcjef", model.network().related()); - Assertions.assertEquals("muvp", model.network().resourceName()); - Assertions.assertEquals("d", model.byok().id()); - Assertions.assertEquals("orppxebmnzbtb", model.byok().related()); - Assertions.assertEquals("pglkf", model.byok().resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterStatusEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterStatusEntityTests.java deleted file mode 100644 index f7213bca0f93..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ClusterStatusEntityTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterStatusEntity; -import org.junit.jupiter.api.Assertions; - -public final class ClusterStatusEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ClusterStatusEntity model = BinaryData.fromString("{\"phase\":\"keqdcvdrhvoods\",\"cku\":1475281926}") - .toObject(ClusterStatusEntity.class); - Assertions.assertEquals("keqdcvdrhvoods", model.phase()); - Assertions.assertEquals(1475281926, model.cku()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ClusterStatusEntity model = new ClusterStatusEntity().withPhase("keqdcvdrhvoods").withCku(1475281926); - model = BinaryData.fromObject(model).toObject(ClusterStatusEntity.class); - Assertions.assertEquals("keqdcvdrhvoods", model.phase()); - Assertions.assertEquals(1475281926, model.cku()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementPropertiesTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementPropertiesTests.java deleted file mode 100644 index 92887d948de3..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementPropertiesTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.ConfluentAgreementProperties; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; - -public final class ConfluentAgreementPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ConfluentAgreementProperties model = BinaryData.fromString( - "{\"publisher\":\"fwvuk\",\"product\":\"audccsnhs\",\"plan\":\"nyejhkryhtnap\",\"licenseTextLink\":\"wlokjyem\",\"privacyPolicyLink\":\"vnipjox\",\"retrieveDatetime\":\"2020-12-27T04:20:57Z\",\"signature\":\"hgejspodma\",\"accepted\":false}") - .toObject(ConfluentAgreementProperties.class); - Assertions.assertEquals("fwvuk", model.publisher()); - Assertions.assertEquals("audccsnhs", model.product()); - Assertions.assertEquals("nyejhkryhtnap", model.plan()); - Assertions.assertEquals("wlokjyem", model.licenseTextLink()); - Assertions.assertEquals("vnipjox", model.privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2020-12-27T04:20:57Z"), model.retrieveDatetime()); - Assertions.assertEquals("hgejspodma", model.signature()); - Assertions.assertEquals(false, model.accepted()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ConfluentAgreementProperties model = new ConfluentAgreementProperties().withPublisher("fwvuk") - .withProduct("audccsnhs") - .withPlan("nyejhkryhtnap") - .withLicenseTextLink("wlokjyem") - .withPrivacyPolicyLink("vnipjox") - .withRetrieveDatetime(OffsetDateTime.parse("2020-12-27T04:20:57Z")) - .withSignature("hgejspodma") - .withAccepted(false); - model = BinaryData.fromObject(model).toObject(ConfluentAgreementProperties.class); - Assertions.assertEquals("fwvuk", model.publisher()); - 
Assertions.assertEquals("audccsnhs", model.product()); - Assertions.assertEquals("nyejhkryhtnap", model.plan()); - Assertions.assertEquals("wlokjyem", model.licenseTextLink()); - Assertions.assertEquals("vnipjox", model.privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2020-12-27T04:20:57Z"), model.retrieveDatetime()); - Assertions.assertEquals("hgejspodma", model.signature()); - Assertions.assertEquals(false, model.accepted()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementResourceInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementResourceInnerTests.java deleted file mode 100644 index c44db2c907ad..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementResourceInnerTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.ConfluentAgreementResourceInner; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; - -public final class ConfluentAgreementResourceInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ConfluentAgreementResourceInner model = BinaryData.fromString( - "{\"properties\":{\"publisher\":\"bpzvgn\",\"product\":\"symglzufcyz\",\"plan\":\"hdbihan\",\"licenseTextLink\":\"hfcbjysa\",\"privacyPolicyLink\":\"th\",\"retrieveDatetime\":\"2021-11-04T10:26:32Z\",\"signature\":\"bifpikxwczb\",\"accepted\":true},\"id\":\"npqxuh\",\"name\":\"vyq\",\"type\":\"iwbybrkxvdumjg\"}") - .toObject(ConfluentAgreementResourceInner.class); - Assertions.assertEquals("bpzvgn", model.publisher()); - Assertions.assertEquals("symglzufcyz", model.product()); - Assertions.assertEquals("hdbihan", model.plan()); - Assertions.assertEquals("hfcbjysa", model.licenseTextLink()); - Assertions.assertEquals("th", model.privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2021-11-04T10:26:32Z"), model.retrieveDatetime()); - Assertions.assertEquals("bifpikxwczb", model.signature()); - Assertions.assertEquals(true, model.accepted()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ConfluentAgreementResourceInner model = new ConfluentAgreementResourceInner().withPublisher("bpzvgn") - .withProduct("symglzufcyz") - .withPlan("hdbihan") - .withLicenseTextLink("hfcbjysa") - .withPrivacyPolicyLink("th") - .withRetrieveDatetime(OffsetDateTime.parse("2021-11-04T10:26:32Z")) - .withSignature("bifpikxwczb") - .withAccepted(true); - model = BinaryData.fromObject(model).toObject(ConfluentAgreementResourceInner.class); - Assertions.assertEquals("bpzvgn", model.publisher()); - Assertions.assertEquals("symglzufcyz", model.product()); - Assertions.assertEquals("hdbihan", model.plan()); - Assertions.assertEquals("hfcbjysa", model.licenseTextLink()); - Assertions.assertEquals("th", model.privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2021-11-04T10:26:32Z"), model.retrieveDatetime()); - Assertions.assertEquals("bifpikxwczb", model.signature()); - Assertions.assertEquals(true, model.accepted()); - } -} diff 
--git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementResourceListResponseTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementResourceListResponseTests.java deleted file mode 100644 index 2dbe1d1b750d..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentAgreementResourceListResponseTests.java +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.ConfluentAgreementResourceInner; -import com.azure.resourcemanager.confluent.models.ConfluentAgreementResourceListResponse; -import java.time.OffsetDateTime; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ConfluentAgreementResourceListResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ConfluentAgreementResourceListResponse model = BinaryData.fromString( - "{\"value\":[{\"properties\":{\"publisher\":\"vgjxpybczm\",\"product\":\"mtz\",\"plan\":\"bsphrupidgsybbe\",\"licenseTextLink\":\"ph\",\"privacyPolicyLink\":\"cmsxaobhdxbm\",\"retrieveDatetime\":\"2021-07-17T05:00:46Z\",\"signature\":\"qj\",\"accepted\":false},\"id\":\"tbmufpo\",\"name\":\"noi\",\"type\":\"hwlrx\"},{\"properties\":{\"publisher\":\"oqijgkdmbpaz\",\"product\":\"bc\",\"plan\":\"pdznrbtcqqjnqgl\",\"licenseTextLink\":\"gnufoooj\",\"privacyPolicyLink\":\"ifsqesaagdfmg\",\"retrieveDatetime\":\"2021-05-15T08:16:53Z\",\"signature\":\"j\",\"accepted\":true},\"id\":\"f\",\"name\":\"wmrvktsizntocipa\",\"type\":\"uajpsquc\"},{\"properties\":{\"publisher\":\"fdkfogk\",\"product\":\"gjofjd\",\"plan\":\"qs\",\"licenseTextLink\":\"eupewnwreitjz\",\"privacyPolicyLink\":\"lusarh\",\"retrieveDatetime\":\"2021-11-04T00:37:18Z\",\"signature\":\"qhsmyurkdtml\",\"accepted\":false},\"id\":\"kuksjtxukcdm\",\"name\":\"arcryuanzwuxzdxt\",\"type\":\"yrlhmwhfpmrqobm\"}],\"nextLink\":\"kknryrtihf\"}") - .toObject(ConfluentAgreementResourceListResponse.class); - Assertions.assertEquals("vgjxpybczm", model.value().get(0).publisher()); - Assertions.assertEquals("mtz", model.value().get(0).product()); - Assertions.assertEquals("bsphrupidgsybbe", model.value().get(0).plan()); - Assertions.assertEquals("ph", model.value().get(0).licenseTextLink()); - Assertions.assertEquals("cmsxaobhdxbm", model.value().get(0).privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2021-07-17T05:00:46Z"), model.value().get(0).retrieveDatetime()); - Assertions.assertEquals("qj", model.value().get(0).signature()); - Assertions.assertEquals(false, model.value().get(0).accepted()); - Assertions.assertEquals("kknryrtihf", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ConfluentAgreementResourceListResponse model - = new ConfluentAgreementResourceListResponse().withValue(Arrays.asList( - new ConfluentAgreementResourceInner().withPublisher("vgjxpybczm") - .withProduct("mtz") - .withPlan("bsphrupidgsybbe") - .withLicenseTextLink("ph") - .withPrivacyPolicyLink("cmsxaobhdxbm") - .withRetrieveDatetime(OffsetDateTime.parse("2021-07-17T05:00:46Z")) - 
.withSignature("qj") - .withAccepted(false), - new ConfluentAgreementResourceInner().withPublisher("oqijgkdmbpaz") - .withProduct("bc") - .withPlan("pdznrbtcqqjnqgl") - .withLicenseTextLink("gnufoooj") - .withPrivacyPolicyLink("ifsqesaagdfmg") - .withRetrieveDatetime(OffsetDateTime.parse("2021-05-15T08:16:53Z")) - .withSignature("j") - .withAccepted(true), - new ConfluentAgreementResourceInner().withPublisher("fdkfogk") - .withProduct("gjofjd") - .withPlan("qs") - .withLicenseTextLink("eupewnwreitjz") - .withPrivacyPolicyLink("lusarh") - .withRetrieveDatetime(OffsetDateTime.parse("2021-11-04T00:37:18Z")) - .withSignature("qhsmyurkdtml") - .withAccepted(false))) - .withNextLink("kknryrtihf"); - model = BinaryData.fromObject(model).toObject(ConfluentAgreementResourceListResponse.class); - Assertions.assertEquals("vgjxpybczm", model.value().get(0).publisher()); - Assertions.assertEquals("mtz", model.value().get(0).product()); - Assertions.assertEquals("bsphrupidgsybbe", model.value().get(0).plan()); - Assertions.assertEquals("ph", model.value().get(0).licenseTextLink()); - Assertions.assertEquals("cmsxaobhdxbm", model.value().get(0).privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2021-07-17T05:00:46Z"), model.value().get(0).retrieveDatetime()); - Assertions.assertEquals("qj", model.value().get(0).signature()); - Assertions.assertEquals(false, model.value().get(0).accepted()); - Assertions.assertEquals("kknryrtihf", model.nextLink()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentListMetadataTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentListMetadataTests.java deleted file mode 100644 index dc5e33bfe358..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ConfluentListMetadataTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ConfluentListMetadata; -import org.junit.jupiter.api.Assertions; - -public final class ConfluentListMetadataTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ConfluentListMetadata model = BinaryData.fromString( - "{\"first\":\"uhrzayvvt\",\"last\":\"vdfgiotk\",\"prev\":\"utqxlngx\",\"next\":\"fgugnxkrxdqmid\",\"total_size\":1583346323}") - .toObject(ConfluentListMetadata.class); - Assertions.assertEquals("uhrzayvvt", model.first()); - Assertions.assertEquals("vdfgiotk", model.last()); - Assertions.assertEquals("utqxlngx", model.prev()); - Assertions.assertEquals("fgugnxkrxdqmid", model.next()); - Assertions.assertEquals(1583346323, model.totalSize()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ConfluentListMetadata model = new ConfluentListMetadata().withFirst("uhrzayvvt") - .withLast("vdfgiotk") - .withPrev("utqxlngx") - .withNext("fgugnxkrxdqmid") - .withTotalSize(1583346323); - model = BinaryData.fromObject(model).toObject(ConfluentListMetadata.class); - Assertions.assertEquals("uhrzayvvt", model.first()); - Assertions.assertEquals("vdfgiotk", model.last()); - Assertions.assertEquals("utqxlngx", model.prev()); - Assertions.assertEquals("fgugnxkrxdqmid", model.next()); - Assertions.assertEquals(1583346323, model.totalSize()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/CreateApiKeyModelTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/CreateApiKeyModelTests.java deleted file mode 100644 index 38e239c1e8e2..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/CreateApiKeyModelTests.java +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.CreateApiKeyModel; -import org.junit.jupiter.api.Assertions; - -public final class CreateApiKeyModelTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - CreateApiKeyModel model - = BinaryData.fromString("{\"name\":\"wp\",\"description\":\"sutrgjup\"}").toObject(CreateApiKeyModel.class); - Assertions.assertEquals("wp", model.name()); - Assertions.assertEquals("sutrgjup", model.description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - CreateApiKeyModel model = new CreateApiKeyModel().withName("wp").withDescription("sutrgjup"); - model = BinaryData.fromObject(model).toObject(CreateApiKeyModel.class); - Assertions.assertEquals("wp", model.name()); - Assertions.assertEquals("sutrgjup", model.description()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/EnvironmentPropertiesTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/EnvironmentPropertiesTests.java deleted file mode 100644 index f452b7ca8365..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/EnvironmentPropertiesTests.java +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.EnvironmentProperties; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class EnvironmentPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - EnvironmentProperties model = BinaryData.fromString( - "{\"metadata\":{\"self\":\"jhxbld\",\"resourceName\":\"wwrlkdmtncv\",\"createdTimestamp\":\"otllxdyhgsyo\",\"updatedTimestamp\":\"gjltdtbnnhado\",\"deletedTimestamp\":\"rkvcikhnvpa\"}}") - .toObject(EnvironmentProperties.class); - Assertions.assertEquals("jhxbld", model.metadata().self()); - Assertions.assertEquals("wwrlkdmtncv", model.metadata().resourceName()); - Assertions.assertEquals("otllxdyhgsyo", model.metadata().createdTimestamp()); - Assertions.assertEquals("gjltdtbnnhado", model.metadata().updatedTimestamp()); - Assertions.assertEquals("rkvcikhnvpa", model.metadata().deletedTimestamp()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - EnvironmentProperties model = new EnvironmentProperties().withMetadata(new SCMetadataEntity().withSelf("jhxbld") - .withResourceName("wwrlkdmtncv") - .withCreatedTimestamp("otllxdyhgsyo") - .withUpdatedTimestamp("gjltdtbnnhado") - .withDeletedTimestamp("rkvcikhnvpa")); - model = BinaryData.fromObject(model).toObject(EnvironmentProperties.class); - Assertions.assertEquals("jhxbld", model.metadata().self()); - Assertions.assertEquals("wwrlkdmtncv", model.metadata().resourceName()); - Assertions.assertEquals("otllxdyhgsyo", model.metadata().createdTimestamp()); - Assertions.assertEquals("gjltdtbnnhado", model.metadata().updatedTimestamp()); - Assertions.assertEquals("rkvcikhnvpa", model.metadata().deletedTimestamp()); - } -} diff --git 
a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/EnvironmentRecordTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/EnvironmentRecordTests.java deleted file mode 100644 index b091d30344e5..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/EnvironmentRecordTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.EnvironmentRecord; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class EnvironmentRecordTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - EnvironmentRecord model = BinaryData.fromString( - "{\"kind\":\"xnehmpvec\",\"id\":\"odebfqkkrbmpu\",\"metadata\":{\"self\":\"iw\",\"resource_name\":\"zlfbxzpuzycispnq\",\"created_at\":\"hmgkbrpyy\",\"updated_at\":\"ibnuqqkpik\",\"deleted_at\":\"rgvtqag\"},\"display_name\":\"uynhijg\"}") - .toObject(EnvironmentRecord.class); - Assertions.assertEquals("xnehmpvec", model.kind()); - Assertions.assertEquals("odebfqkkrbmpu", model.id()); - Assertions.assertEquals("iw", model.metadata().self()); - Assertions.assertEquals("zlfbxzpuzycispnq", model.metadata().resourceName()); - Assertions.assertEquals("hmgkbrpyy", model.metadata().createdAt()); - Assertions.assertEquals("ibnuqqkpik", model.metadata().updatedAt()); - Assertions.assertEquals("rgvtqag", model.metadata().deletedAt()); - Assertions.assertEquals("uynhijg", model.displayName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - EnvironmentRecord model = new EnvironmentRecord().withKind("xnehmpvec") - .withId("odebfqkkrbmpu") - .withMetadata(new MetadataEntity().withSelf("iw") - .withResourceName("zlfbxzpuzycispnq") - .withCreatedAt("hmgkbrpyy") - .withUpdatedAt("ibnuqqkpik") - .withDeletedAt("rgvtqag")) - .withDisplayName("uynhijg"); - model = BinaryData.fromObject(model).toObject(EnvironmentRecord.class); - Assertions.assertEquals("xnehmpvec", model.kind()); - Assertions.assertEquals("odebfqkkrbmpu", model.id()); - Assertions.assertEquals("iw", model.metadata().self()); - Assertions.assertEquals("zlfbxzpuzycispnq", model.metadata().resourceName()); - Assertions.assertEquals("hmgkbrpyy", model.metadata().createdAt()); - Assertions.assertEquals("ibnuqqkpik", model.metadata().updatedAt()); - Assertions.assertEquals("rgvtqag", model.metadata().deletedAt()); - Assertions.assertEquals("uynhijg", model.displayName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/GetEnvironmentsResponseTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/GetEnvironmentsResponseTests.java deleted file mode 100644 index b4748b68b9e8..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/GetEnvironmentsResponseTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.SCEnvironmentRecordInner; -import com.azure.resourcemanager.confluent.models.GetEnvironmentsResponse; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class GetEnvironmentsResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - GetEnvironmentsResponse model = BinaryData.fromString( - "{\"value\":[{\"kind\":\"dsytgadgvr\",\"id\":\"aeneqnzarrwl\",\"properties\":{\"metadata\":{\"self\":\"jfqka\",\"resourceName\":\"wiipfpub\",\"createdTimestamp\":\"bwwift\",\"updatedTimestamp\":\"qkvpuvksgplsakn\",\"deletedTimestamp\":\"fsynljphuop\"}},\"name\":\"dlqiyntorzih\"}],\"nextLink\":\"osjswsr\"}") - .toObject(GetEnvironmentsResponse.class); - Assertions.assertEquals("dsytgadgvr", model.value().get(0).kind()); - Assertions.assertEquals("aeneqnzarrwl", model.value().get(0).id()); - Assertions.assertEquals("dlqiyntorzih", model.value().get(0).name()); - Assertions.assertEquals("jfqka", model.value().get(0).metadata().self()); - Assertions.assertEquals("wiipfpub", model.value().get(0).metadata().resourceName()); - Assertions.assertEquals("bwwift", model.value().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("qkvpuvksgplsakn", model.value().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("fsynljphuop", model.value().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("osjswsr", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - GetEnvironmentsResponse model = new GetEnvironmentsResponse() - .withValue(Arrays.asList(new SCEnvironmentRecordInner().withKind("dsytgadgvr") - .withId("aeneqnzarrwl") - .withName("dlqiyntorzih") - .withMetadata(new SCMetadataEntity().withSelf("jfqka") - .withResourceName("wiipfpub") - .withCreatedTimestamp("bwwift") - .withUpdatedTimestamp("qkvpuvksgplsakn") - .withDeletedTimestamp("fsynljphuop")))) - .withNextLink("osjswsr"); - model = BinaryData.fromObject(model).toObject(GetEnvironmentsResponse.class); - Assertions.assertEquals("dsytgadgvr", model.value().get(0).kind()); - Assertions.assertEquals("aeneqnzarrwl", model.value().get(0).id()); - Assertions.assertEquals("dlqiyntorzih", model.value().get(0).name()); - Assertions.assertEquals("jfqka", model.value().get(0).metadata().self()); - Assertions.assertEquals("wiipfpub", model.value().get(0).metadata().resourceName()); - Assertions.assertEquals("bwwift", model.value().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("qkvpuvksgplsakn", model.value().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("fsynljphuop", model.value().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("osjswsr", model.nextLink()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/InvitationRecordInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/InvitationRecordInnerTests.java deleted file mode 100644 index 49bfb0bbaafc..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/InvitationRecordInnerTests.java +++ /dev/null @@ -1,60 
+0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.InvitationRecordInner; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class InvitationRecordInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - InvitationRecordInner model = BinaryData.fromString( - "{\"kind\":\"xxjnspydptk\",\"id\":\"nkoukn\",\"metadata\":{\"self\":\"wtiukbldn\",\"resource_name\":\"pocipazyxoegu\",\"created_at\":\"jnpiucgyg\",\"updated_at\":\"qzntypm\",\"deleted_at\":\"p\"},\"email\":\"c\",\"auth_type\":\"qjsdpydnfyhxdeo\",\"status\":\"zi\",\"accepted_at\":\"ifsjttgzfbishcb\",\"expires_at\":\"ajdeyeamdphaga\"}") - .toObject(InvitationRecordInner.class); - Assertions.assertEquals("xxjnspydptk", model.kind()); - Assertions.assertEquals("nkoukn", model.id()); - Assertions.assertEquals("wtiukbldn", model.metadata().self()); - Assertions.assertEquals("pocipazyxoegu", model.metadata().resourceName()); - Assertions.assertEquals("jnpiucgyg", model.metadata().createdAt()); - Assertions.assertEquals("qzntypm", model.metadata().updatedAt()); - Assertions.assertEquals("p", model.metadata().deletedAt()); - Assertions.assertEquals("c", model.email()); - Assertions.assertEquals("qjsdpydnfyhxdeo", model.authType()); - Assertions.assertEquals("zi", model.status()); - Assertions.assertEquals("ifsjttgzfbishcb", model.acceptedAt()); - Assertions.assertEquals("ajdeyeamdphaga", model.expiresAt()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - InvitationRecordInner model = new InvitationRecordInner().withKind("xxjnspydptk") - .withId("nkoukn") - .withMetadata(new MetadataEntity().withSelf("wtiukbldn") - .withResourceName("pocipazyxoegu") - .withCreatedAt("jnpiucgyg") - .withUpdatedAt("qzntypm") - .withDeletedAt("p")) - .withEmail("c") - .withAuthType("qjsdpydnfyhxdeo") - .withStatus("zi") - .withAcceptedAt("ifsjttgzfbishcb") - .withExpiresAt("ajdeyeamdphaga"); - model = BinaryData.fromObject(model).toObject(InvitationRecordInner.class); - Assertions.assertEquals("xxjnspydptk", model.kind()); - Assertions.assertEquals("nkoukn", model.id()); - Assertions.assertEquals("wtiukbldn", model.metadata().self()); - Assertions.assertEquals("pocipazyxoegu", model.metadata().resourceName()); - Assertions.assertEquals("jnpiucgyg", model.metadata().createdAt()); - Assertions.assertEquals("qzntypm", model.metadata().updatedAt()); - Assertions.assertEquals("p", model.metadata().deletedAt()); - Assertions.assertEquals("c", model.email()); - Assertions.assertEquals("qjsdpydnfyhxdeo", model.authType()); - Assertions.assertEquals("zi", model.status()); - Assertions.assertEquals("ifsjttgzfbishcb", model.acceptedAt()); - Assertions.assertEquals("ajdeyeamdphaga", model.expiresAt()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListAccessRequestModelTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListAccessRequestModelTests.java deleted file mode 100644 index 41722fcddc70..000000000000 --- 
a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListAccessRequestModelTests.java +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class ListAccessRequestModelTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ListAccessRequestModel model - = BinaryData.fromString("{\"searchFilters\":{\"nohjt\":\"xhcr\",\"soifiyipjxsqw\":\"kwh\"}}") - .toObject(ListAccessRequestModel.class); - Assertions.assertEquals("xhcr", model.searchFilters().get("nohjt")); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ListAccessRequestModel model - = new ListAccessRequestModel().withSearchFilters(mapOf("nohjt", "xhcr", "soifiyipjxsqw", "kwh")); - model = BinaryData.fromObject(model).toObject(ListAccessRequestModel.class); - Assertions.assertEquals("xhcr", model.searchFilters().get("nohjt")); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static <T> Map<String, T> mapOf(Object... inputs) { - Map<String, T> map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListClustersSuccessResponseTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListClustersSuccessResponseTests.java deleted file mode 100644 index 5a8da5672a5c..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListClustersSuccessResponseTests.java +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator.
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import com.azure.resourcemanager.confluent.models.ClusterStatusEntity; -import com.azure.resourcemanager.confluent.models.ListClustersSuccessResponse; -import com.azure.resourcemanager.confluent.models.SCClusterByokEntity; -import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity; -import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ListClustersSuccessResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ListClustersSuccessResponse model = BinaryData.fromString( - "{\"value\":[{\"kind\":\"mtdaa\",\"id\":\"dvwvgpio\",\"properties\":{\"metadata\":{\"self\":\"rtfudxepxg\",\"resourceName\":\"agvrvmnpkuk\",\"createdTimestamp\":\"i\",\"updatedTimestamp\":\"blxgwimf\",\"deletedTimestamp\":\"hfjx\"},\"spec\":{\"name\":\"zk\",\"availability\":\"oqreyfkzikfjawn\",\"cloud\":\"ivx\",\"zone\":\"zel\",\"region\":\"irels\",\"kafkaBootstrapEndpoint\":\"aenwabf\",\"httpEndpoint\":\"kl\",\"apiEndpoint\":\"xbjhwuaanozjosph\",\"config\":{\"kind\":\"l\"},\"environment\":{\"id\":\"v\",\"environment\":\"glrvimjwosytxi\",\"related\":\"skfc\",\"resourceName\":\"qumiek\"},\"network\":{\"id\":\"zikhl\",\"environment\":\"jhdgqggebdunyga\",\"related\":\"idb\",\"resourceName\":\"atpxl\"},\"byok\":{\"id\":\"cyjmoadsuvarmy\",\"related\":\"mjsjqb\",\"resourceName\":\"hyxxrwlycoduhpk\"}},\"status\":{\"phase\":\"ymareqnajxqugj\",\"cku\":123081657}},\"name\":\"ubeddg\"},{\"kind\":\"ofwq\",\"id\":\"qal\",\"properties\":{\"metadata\":{\"self\":\"jijpxac\",\"resourceName\":\"udfnbyxba\",\"createdTimestamp\":\"bjyvay\",\"updatedTimestamp\":\"imrzrtuzqog\",\"deletedTimestamp\":\"xnevfdnwn\"},\"spec\":{\"name\":\"wzsyyceuzs\",\"availability\":\"bjudpfrxtrthzv\",\"cloud\":\"tdwkqbrq\",\"zone\":\"paxh\",\"region\":\"iilivpdtiirqtd\",\"kafkaBootstrapEndpoint\":\"axoruzfgsquy\",\"httpEndpoint\":\"rxxle\",\"apiEndpoint\":\"ramxjezwlwnw\",\"config\":{\"kind\":\"lcvydy\"},\"environment\":{\"id\":\"dooaojkniodko\",\"environment\":\"bw\",\"related\":\"jhemms\",\"resourceName\":\"dkcrodt\"},\"network\":{\"id\":\"fw\",\"environment\":\"fltkacjv\",\"related\":\"kdlfoa\",\"resourceName\":\"gkfpaga\"},\"byok\":{\"id\":\"ulpqblylsyxkqjn\",\"related\":\"ervtiagxs\",\"resourceName\":\"zuempsbzkf\"}},\"status\":{\"phase\":\"yvpnqicvinvkjj\",\"cku\":602936654}},\"name\":\"buukzclewyhml\"},{\"kind\":\"aztz\",\"id\":\"fn\",\"properties\":{\"metadata\":{\"self\":\"yfzqwhxxbu\",\"resourceName\":\"a\",\"createdTimestamp\":\"feqztppriol\",\"updatedTimestamp\":\"rjaltolmncw\",\"deletedTimestamp\":\"bqwcsdbnwdcf\"},\"spec\":{\"name\":\"qdpfuvglsbjjca\",\"availability\":\"xbvtvudu\",\"cloud\":\"cormr\",\"zone\":\"qtvcofudflvkgj\",\"region\":\"gdknnqv\",\"kafkaBootstrapEndpoint\":\"znqntoru\",\"httpEndpoint\":\"gsahmkycgrauw\",\"apiEndpoint\":\"etaebu\",\"config\":{\"kind\":\"dmovsm\"},\"environment\":{\"id\":\"wabm\",\"environment\":\"efkifr\",\"related\":\"puqujmqlgkfbtn\",\"resourceName\":\"aongbj\"},\"network\":{\"id\":\"ujitcjedftww\",\"environment\":\"zkoj\",\"related\":\"c\",\"resourceName\":\"foqouicybx\"},\"byok\":{\"id\":\"gszufoxciqopid\",\"related\":\"mci
odhkhazxkhn\",\"resourceName\":\"onlwntoeg\"}},\"status\":{\"phase\":\"wbw\",\"cku\":924333329}},\"name\":\"zcmrvexztvb\"}],\"nextLink\":\"gsfraoyzkoow\"}") - .toObject(ListClustersSuccessResponse.class); - Assertions.assertEquals("mtdaa", model.value().get(0).kind()); - Assertions.assertEquals("dvwvgpio", model.value().get(0).id()); - Assertions.assertEquals("ubeddg", model.value().get(0).name()); - Assertions.assertEquals("rtfudxepxg", model.value().get(0).metadata().self()); - Assertions.assertEquals("agvrvmnpkuk", model.value().get(0).metadata().resourceName()); - Assertions.assertEquals("i", model.value().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("blxgwimf", model.value().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("hfjx", model.value().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("zk", model.value().get(0).spec().name()); - Assertions.assertEquals("oqreyfkzikfjawn", model.value().get(0).spec().availability()); - Assertions.assertEquals("ivx", model.value().get(0).spec().cloud()); - Assertions.assertEquals("zel", model.value().get(0).spec().zone()); - Assertions.assertEquals("irels", model.value().get(0).spec().region()); - Assertions.assertEquals("aenwabf", model.value().get(0).spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("kl", model.value().get(0).spec().httpEndpoint()); - Assertions.assertEquals("xbjhwuaanozjosph", model.value().get(0).spec().apiEndpoint()); - Assertions.assertEquals("l", model.value().get(0).spec().config().kind()); - Assertions.assertEquals("v", model.value().get(0).spec().environment().id()); - Assertions.assertEquals("glrvimjwosytxi", model.value().get(0).spec().environment().environment()); - Assertions.assertEquals("skfc", model.value().get(0).spec().environment().related()); - Assertions.assertEquals("qumiek", model.value().get(0).spec().environment().resourceName()); - Assertions.assertEquals("zikhl", model.value().get(0).spec().network().id()); - Assertions.assertEquals("jhdgqggebdunyga", model.value().get(0).spec().network().environment()); - Assertions.assertEquals("idb", model.value().get(0).spec().network().related()); - Assertions.assertEquals("atpxl", model.value().get(0).spec().network().resourceName()); - Assertions.assertEquals("cyjmoadsuvarmy", model.value().get(0).spec().byok().id()); - Assertions.assertEquals("mjsjqb", model.value().get(0).spec().byok().related()); - Assertions.assertEquals("hyxxrwlycoduhpk", model.value().get(0).spec().byok().resourceName()); - Assertions.assertEquals("ymareqnajxqugj", model.value().get(0).status().phase()); - Assertions.assertEquals(123081657, model.value().get(0).status().cku()); - Assertions.assertEquals("gsfraoyzkoow", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ListClustersSuccessResponse model = new ListClustersSuccessResponse() - .withValue(Arrays.asList( - new SCClusterRecordInner().withKind("mtdaa") - .withId("dvwvgpio") - .withName("ubeddg") - .withMetadata(new SCMetadataEntity().withSelf("rtfudxepxg") - .withResourceName("agvrvmnpkuk") - .withCreatedTimestamp("i") - .withUpdatedTimestamp("blxgwimf") - .withDeletedTimestamp("hfjx")) - .withSpec(new SCClusterSpecEntity().withName("zk") - .withAvailability("oqreyfkzikfjawn") - .withCloud("ivx") - .withZone("zel") - .withRegion("irels") - .withKafkaBootstrapEndpoint("aenwabf") - .withHttpEndpoint("kl") - .withApiEndpoint("xbjhwuaanozjosph") - .withConfig(new ClusterConfigEntity().withKind("l")) - 
.withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("v") - .withEnvironment("glrvimjwosytxi") - .withRelated("skfc") - .withResourceName("qumiek")) - .withNetwork(new SCClusterNetworkEnvironmentEntity().withId("zikhl") - .withEnvironment("jhdgqggebdunyga") - .withRelated("idb") - .withResourceName("atpxl")) - .withByok(new SCClusterByokEntity().withId("cyjmoadsuvarmy") - .withRelated("mjsjqb") - .withResourceName("hyxxrwlycoduhpk"))) - .withStatus(new ClusterStatusEntity().withPhase("ymareqnajxqugj").withCku(123081657)), - new SCClusterRecordInner().withKind("ofwq") - .withId("qal") - .withName("buukzclewyhml") - .withMetadata(new SCMetadataEntity().withSelf("jijpxac") - .withResourceName("udfnbyxba") - .withCreatedTimestamp("bjyvay") - .withUpdatedTimestamp("imrzrtuzqog") - .withDeletedTimestamp("xnevfdnwn")) - .withSpec(new SCClusterSpecEntity().withName("wzsyyceuzs") - .withAvailability("bjudpfrxtrthzv") - .withCloud("tdwkqbrq") - .withZone("paxh") - .withRegion("iilivpdtiirqtd") - .withKafkaBootstrapEndpoint("axoruzfgsquy") - .withHttpEndpoint("rxxle") - .withApiEndpoint("ramxjezwlwnw") - .withConfig(new ClusterConfigEntity().withKind("lcvydy")) - .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("dooaojkniodko") - .withEnvironment("bw") - .withRelated("jhemms") - .withResourceName("dkcrodt")) - .withNetwork(new SCClusterNetworkEnvironmentEntity().withId("fw") - .withEnvironment("fltkacjv") - .withRelated("kdlfoa") - .withResourceName("gkfpaga")) - .withByok(new SCClusterByokEntity().withId("ulpqblylsyxkqjn") - .withRelated("ervtiagxs") - .withResourceName("zuempsbzkf"))) - .withStatus(new ClusterStatusEntity().withPhase("yvpnqicvinvkjj").withCku(602936654)), - new SCClusterRecordInner().withKind("aztz") - .withId("fn") - .withName("zcmrvexztvb") - .withMetadata(new SCMetadataEntity().withSelf("yfzqwhxxbu") - .withResourceName("a") - .withCreatedTimestamp("feqztppriol") - .withUpdatedTimestamp("rjaltolmncw") - .withDeletedTimestamp("bqwcsdbnwdcf")) - .withSpec(new SCClusterSpecEntity().withName("qdpfuvglsbjjca") - .withAvailability("xbvtvudu") - .withCloud("cormr") - .withZone("qtvcofudflvkgj") - .withRegion("gdknnqv") - .withKafkaBootstrapEndpoint("znqntoru") - .withHttpEndpoint("gsahmkycgrauw") - .withApiEndpoint("etaebu") - .withConfig(new ClusterConfigEntity().withKind("dmovsm")) - .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("wabm") - .withEnvironment("efkifr") - .withRelated("puqujmqlgkfbtn") - .withResourceName("aongbj")) - .withNetwork(new SCClusterNetworkEnvironmentEntity().withId("ujitcjedftww") - .withEnvironment("zkoj") - .withRelated("c") - .withResourceName("foqouicybx")) - .withByok(new SCClusterByokEntity().withId("gszufoxciqopid") - .withRelated("mciodhkhazxkhn") - .withResourceName("onlwntoeg"))) - .withStatus(new ClusterStatusEntity().withPhase("wbw").withCku(924333329)))) - .withNextLink("gsfraoyzkoow"); - model = BinaryData.fromObject(model).toObject(ListClustersSuccessResponse.class); - Assertions.assertEquals("mtdaa", model.value().get(0).kind()); - Assertions.assertEquals("dvwvgpio", model.value().get(0).id()); - Assertions.assertEquals("ubeddg", model.value().get(0).name()); - Assertions.assertEquals("rtfudxepxg", model.value().get(0).metadata().self()); - Assertions.assertEquals("agvrvmnpkuk", model.value().get(0).metadata().resourceName()); - Assertions.assertEquals("i", model.value().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("blxgwimf", model.value().get(0).metadata().updatedTimestamp()); - 
Assertions.assertEquals("hfjx", model.value().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("zk", model.value().get(0).spec().name()); - Assertions.assertEquals("oqreyfkzikfjawn", model.value().get(0).spec().availability()); - Assertions.assertEquals("ivx", model.value().get(0).spec().cloud()); - Assertions.assertEquals("zel", model.value().get(0).spec().zone()); - Assertions.assertEquals("irels", model.value().get(0).spec().region()); - Assertions.assertEquals("aenwabf", model.value().get(0).spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("kl", model.value().get(0).spec().httpEndpoint()); - Assertions.assertEquals("xbjhwuaanozjosph", model.value().get(0).spec().apiEndpoint()); - Assertions.assertEquals("l", model.value().get(0).spec().config().kind()); - Assertions.assertEquals("v", model.value().get(0).spec().environment().id()); - Assertions.assertEquals("glrvimjwosytxi", model.value().get(0).spec().environment().environment()); - Assertions.assertEquals("skfc", model.value().get(0).spec().environment().related()); - Assertions.assertEquals("qumiek", model.value().get(0).spec().environment().resourceName()); - Assertions.assertEquals("zikhl", model.value().get(0).spec().network().id()); - Assertions.assertEquals("jhdgqggebdunyga", model.value().get(0).spec().network().environment()); - Assertions.assertEquals("idb", model.value().get(0).spec().network().related()); - Assertions.assertEquals("atpxl", model.value().get(0).spec().network().resourceName()); - Assertions.assertEquals("cyjmoadsuvarmy", model.value().get(0).spec().byok().id()); - Assertions.assertEquals("mjsjqb", model.value().get(0).spec().byok().related()); - Assertions.assertEquals("hyxxrwlycoduhpk", model.value().get(0).spec().byok().resourceName()); - Assertions.assertEquals("ymareqnajxqugj", model.value().get(0).status().phase()); - Assertions.assertEquals(123081657, model.value().get(0).status().cku()); - Assertions.assertEquals("gsfraoyzkoow", model.nextLink()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListRegionsSuccessResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListRegionsSuccessResponseInnerTests.java deleted file mode 100644 index 63971cb034ab..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListRegionsSuccessResponseInnerTests.java +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.ListRegionsSuccessResponseInner; -import com.azure.resourcemanager.confluent.models.RegionRecord; -import com.azure.resourcemanager.confluent.models.RegionSpecEntity; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ListRegionsSuccessResponseInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ListRegionsSuccessResponseInner model = BinaryData.fromString( - "{\"data\":[{\"kind\":\"vvt\",\"id\":\"einqf\",\"properties\":{\"metadata\":{\"self\":\"qknp\",\"resourceName\":\"gnepttwqmsni\",\"createdTimestamp\":\"cdm\",\"updatedTimestamp\":\"r\",\"deletedTimestamp\":\"lpijnkrxfrd\"},\"spec\":{\"name\":\"ratiz\",\"cloud\":\"onasxifto\",\"regionName\":\"yzhftwesgogczh\",\"packages\":[\"xkr\",\"gnyhmossxkkg\",\"h\",\"rghxjb\"]}}}]}") - .toObject(ListRegionsSuccessResponseInner.class); - Assertions.assertEquals("vvt", model.data().get(0).kind()); - Assertions.assertEquals("einqf", model.data().get(0).id()); - Assertions.assertEquals("qknp", model.data().get(0).metadata().self()); - Assertions.assertEquals("gnepttwqmsni", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("cdm", model.data().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("r", model.data().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("lpijnkrxfrd", model.data().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("ratiz", model.data().get(0).spec().name()); - Assertions.assertEquals("onasxifto", model.data().get(0).spec().cloud()); - Assertions.assertEquals("yzhftwesgogczh", model.data().get(0).spec().regionName()); - Assertions.assertEquals("xkr", model.data().get(0).spec().packages().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ListRegionsSuccessResponseInner model - = new ListRegionsSuccessResponseInner().withData(Arrays.asList(new RegionRecord().withKind("vvt") - .withId("einqf") - .withMetadata(new SCMetadataEntity().withSelf("qknp") - .withResourceName("gnepttwqmsni") - .withCreatedTimestamp("cdm") - .withUpdatedTimestamp("r") - .withDeletedTimestamp("lpijnkrxfrd")) - .withSpec(new RegionSpecEntity().withName("ratiz") - .withCloud("onasxifto") - .withRegionName("yzhftwesgogczh") - .withPackages(Arrays.asList("xkr", "gnyhmossxkkg", "h", "rghxjb"))))); - model = BinaryData.fromObject(model).toObject(ListRegionsSuccessResponseInner.class); - Assertions.assertEquals("vvt", model.data().get(0).kind()); - Assertions.assertEquals("einqf", model.data().get(0).id()); - Assertions.assertEquals("qknp", model.data().get(0).metadata().self()); - Assertions.assertEquals("gnepttwqmsni", model.data().get(0).metadata().resourceName()); - Assertions.assertEquals("cdm", model.data().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("r", model.data().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("lpijnkrxfrd", model.data().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("ratiz", model.data().get(0).spec().name()); - Assertions.assertEquals("onasxifto", model.data().get(0).spec().cloud()); - Assertions.assertEquals("yzhftwesgogczh", model.data().get(0).spec().regionName()); - Assertions.assertEquals("xkr", model.data().get(0).spec().packages().get(0)); - } -} diff --git 
a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListSchemaRegistryClustersResponseTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListSchemaRegistryClustersResponseTests.java deleted file mode 100644 index 4ca97bc68028..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ListSchemaRegistryClustersResponseTests.java +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.SchemaRegistryClusterRecordInner; -import com.azure.resourcemanager.confluent.models.ListSchemaRegistryClustersResponse; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterEnvironmentRegionEntity; -import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterSpecEntity; -import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterStatusEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class ListSchemaRegistryClustersResponseTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - ListSchemaRegistryClustersResponse model = BinaryData.fromString( - "{\"value\":[{\"kind\":\"fhwygzlvdnkfxus\",\"id\":\"dwzrmuh\",\"properties\":{\"metadata\":{\"self\":\"qdpsqxqvpsvu\",\"resourceName\":\"mgccelvezrypq\",\"createdTimestamp\":\"feo\",\"updatedTimestamp\":\"rqwky\",\"deletedTimestamp\":\"ob\"},\"spec\":{\"name\":\"xedk\",\"httpEndpoint\":\"epbqpcrfkbw\",\"package\":\"snjvcdwxlpqekftn\",\"region\":{\"id\":\"jsyingwfqatm\",\"related\":\"htmdvy\",\"resourceName\":\"ikdgszywkbir\"},\"environment\":{\"id\":\"zh\",\"related\":\"kj\",\"resourceName\":\"rvqqaatj\"},\"cloud\":\"rv\"},\"status\":{\"phase\":\"pmfi\"}}}],\"nextLink\":\"fggjioolvr\"}") - .toObject(ListSchemaRegistryClustersResponse.class); - Assertions.assertEquals("fhwygzlvdnkfxus", model.value().get(0).kind()); - Assertions.assertEquals("dwzrmuh", model.value().get(0).id()); - Assertions.assertEquals("qdpsqxqvpsvu", model.value().get(0).metadata().self()); - Assertions.assertEquals("mgccelvezrypq", model.value().get(0).metadata().resourceName()); - Assertions.assertEquals("feo", model.value().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("rqwky", model.value().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("ob", model.value().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("xedk", model.value().get(0).spec().name()); - Assertions.assertEquals("epbqpcrfkbw", model.value().get(0).spec().httpEndpoint()); - Assertions.assertEquals("snjvcdwxlpqekftn", model.value().get(0).spec().packageProperty()); - Assertions.assertEquals("jsyingwfqatm", model.value().get(0).spec().region().id()); - Assertions.assertEquals("htmdvy", model.value().get(0).spec().region().related()); - Assertions.assertEquals("ikdgszywkbir", model.value().get(0).spec().region().resourceName()); - Assertions.assertEquals("zh", model.value().get(0).spec().environment().id()); - Assertions.assertEquals("kj", model.value().get(0).spec().environment().related()); - 
Assertions.assertEquals("rvqqaatj", model.value().get(0).spec().environment().resourceName()); - Assertions.assertEquals("rv", model.value().get(0).spec().cloud()); - Assertions.assertEquals("pmfi", model.value().get(0).status().phase()); - Assertions.assertEquals("fggjioolvr", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - ListSchemaRegistryClustersResponse model = new ListSchemaRegistryClustersResponse() - .withValue(Arrays.asList(new SchemaRegistryClusterRecordInner().withKind("fhwygzlvdnkfxus") - .withId("dwzrmuh") - .withMetadata(new SCMetadataEntity().withSelf("qdpsqxqvpsvu") - .withResourceName("mgccelvezrypq") - .withCreatedTimestamp("feo") - .withUpdatedTimestamp("rqwky") - .withDeletedTimestamp("ob")) - .withSpec(new SchemaRegistryClusterSpecEntity().withName("xedk") - .withHttpEndpoint("epbqpcrfkbw") - .withPackageProperty("snjvcdwxlpqekftn") - .withRegion(new SchemaRegistryClusterEnvironmentRegionEntity().withId("jsyingwfqatm") - .withRelated("htmdvy") - .withResourceName("ikdgszywkbir")) - .withEnvironment(new SchemaRegistryClusterEnvironmentRegionEntity().withId("zh") - .withRelated("kj") - .withResourceName("rvqqaatj")) - .withCloud("rv")) - .withStatus(new SchemaRegistryClusterStatusEntity().withPhase("pmfi")))) - .withNextLink("fggjioolvr"); - model = BinaryData.fromObject(model).toObject(ListSchemaRegistryClustersResponse.class); - Assertions.assertEquals("fhwygzlvdnkfxus", model.value().get(0).kind()); - Assertions.assertEquals("dwzrmuh", model.value().get(0).id()); - Assertions.assertEquals("qdpsqxqvpsvu", model.value().get(0).metadata().self()); - Assertions.assertEquals("mgccelvezrypq", model.value().get(0).metadata().resourceName()); - Assertions.assertEquals("feo", model.value().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("rqwky", model.value().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("ob", model.value().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("xedk", model.value().get(0).spec().name()); - Assertions.assertEquals("epbqpcrfkbw", model.value().get(0).spec().httpEndpoint()); - Assertions.assertEquals("snjvcdwxlpqekftn", model.value().get(0).spec().packageProperty()); - Assertions.assertEquals("jsyingwfqatm", model.value().get(0).spec().region().id()); - Assertions.assertEquals("htmdvy", model.value().get(0).spec().region().related()); - Assertions.assertEquals("ikdgszywkbir", model.value().get(0).spec().region().resourceName()); - Assertions.assertEquals("zh", model.value().get(0).spec().environment().id()); - Assertions.assertEquals("kj", model.value().get(0).spec().environment().related()); - Assertions.assertEquals("rvqqaatj", model.value().get(0).spec().environment().resourceName()); - Assertions.assertEquals("rv", model.value().get(0).spec().cloud()); - Assertions.assertEquals("pmfi", model.value().get(0).status().phase()); - Assertions.assertEquals("fggjioolvr", model.nextLink()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsCreateWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsCreateWithResponseMockTests.java deleted file mode 100644 index 014e308cc408..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsCreateWithResponseMockTests.java 
+++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.fluent.models.ConfluentAgreementResourceInner; -import com.azure.resourcemanager.confluent.models.ConfluentAgreementResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class MarketplaceAgreementsCreateWithResponseMockTests { - @Test - public void testCreateWithResponse() throws Exception { - String responseStr - = "{\"properties\":{\"publisher\":\"kmr\",\"product\":\"mvvhmxtdrjfuta\",\"plan\":\"ebjvewzcjzn\",\"licenseTextLink\":\"cpmguaadraufact\",\"privacyPolicyLink\":\"hzovaj\",\"retrieveDatetime\":\"2021-02-15T15:09:11Z\",\"signature\":\"xxpshneeku\",\"accepted\":false},\"id\":\"slqubkwdl\",\"name\":\"nrdsutujbazpjuoh\",\"type\":\"inyflnorwmduvwp\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ConfluentAgreementResource response = manager.marketplaceAgreements() - .createWithResponse(new ConfluentAgreementResourceInner().withPublisher("oookkqfq") - .withProduct("vleo") - .withPlan("ml") - .withLicenseTextLink("qtqzfavyv") - .withPrivacyPolicyLink("qybaryeua") - .withRetrieveDatetime(OffsetDateTime.parse("2021-06-23T02:25:47Z")) - .withSignature("abqgzslesjcbh") - .withAccepted(false), com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("kmr", response.publisher()); - Assertions.assertEquals("mvvhmxtdrjfuta", response.product()); - Assertions.assertEquals("ebjvewzcjzn", response.plan()); - Assertions.assertEquals("cpmguaadraufact", response.licenseTextLink()); - Assertions.assertEquals("hzovaj", response.privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2021-02-15T15:09:11Z"), response.retrieveDatetime()); - Assertions.assertEquals("xxpshneeku", response.signature()); - Assertions.assertEquals(false, response.accepted()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsListMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsListMockTests.java deleted file mode 100644 index 09c04f253516..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MarketplaceAgreementsListMockTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
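Each of the mock tests being deleted builds its ConfluentManager the same way: a lambda HttpClient that always answers with a canned MockHttpResponse, plus a throwaway AccessToken and an empty AzureProfile. A sketch of that shared harness, factored into a helper (the helper class and method name are ours, not part of the SDK):

    import com.azure.core.credential.AccessToken;
    import com.azure.core.http.HttpClient;
    import com.azure.core.management.AzureEnvironment;
    import com.azure.core.management.profile.AzureProfile;
    import com.azure.core.test.http.MockHttpResponse;
    import com.azure.resourcemanager.confluent.ConfluentManager;
    import java.nio.charset.StandardCharsets;
    import java.time.OffsetDateTime;
    import reactor.core.publisher.Mono;

    public final class MockConfluentManagerSketch {
        // Returns a manager whose every HTTP call is answered with the given JSON and a 200 status.
        static ConfluentManager managerReturning(String responseJson) {
            HttpClient httpClient = request -> Mono
                .just(new MockHttpResponse(request, 200, responseJson.getBytes(StandardCharsets.UTF_8)));
            return ConfluentManager.configure()
                .withHttpClient(httpClient)
                .authenticate(
                    tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)),
                    new AzureProfile("", "", AzureEnvironment.AZURE));
        }
    }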
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.ConfluentAgreementResource; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class MarketplaceAgreementsListMockTests { - @Test - public void testList() throws Exception { - String responseStr - = "{\"value\":[{\"properties\":{\"publisher\":\"bth\",\"product\":\"tgk\",\"plan\":\"tvdxeclzedqb\",\"licenseTextLink\":\"hzlhplodqkdlww\",\"privacyPolicyLink\":\"bum\",\"retrieveDatetime\":\"2021-01-09T13:22:40Z\",\"signature\":\"rqjfsmlm\",\"accepted\":true},\"id\":\"hwgfwsrt\",\"name\":\"wcoezbrhub\",\"type\":\"kh\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.marketplaceAgreements().list(com.azure.core.util.Context.NONE); - - Assertions.assertEquals("bth", response.iterator().next().publisher()); - Assertions.assertEquals("tgk", response.iterator().next().product()); - Assertions.assertEquals("tvdxeclzedqb", response.iterator().next().plan()); - Assertions.assertEquals("hzlhplodqkdlww", response.iterator().next().licenseTextLink()); - Assertions.assertEquals("bum", response.iterator().next().privacyPolicyLink()); - Assertions.assertEquals(OffsetDateTime.parse("2021-01-09T13:22:40Z"), - response.iterator().next().retrieveDatetime()); - Assertions.assertEquals("rqjfsmlm", response.iterator().next().signature()); - Assertions.assertEquals(true, response.iterator().next().accepted()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MetadataEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MetadataEntityTests.java deleted file mode 100644 index 7dbebe307d01..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/MetadataEntityTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
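The deleted MarketplaceAgreementsListMockTests re-evaluates response.iterator().next() for every assertion. A small readability variation, assuming a manager mocked as in the sketch above and reusing the same placeholder payload values ("bth", "tgk"):

    import com.azure.core.http.rest.PagedIterable;
    import com.azure.core.util.Context;
    import com.azure.resourcemanager.confluent.ConfluentManager;
    import com.azure.resourcemanager.confluent.models.ConfluentAgreementResource;
    import org.junit.jupiter.api.Assertions;

    final class ListAgreementsAssertionSketch {
        static void assertFirstAgreement(ConfluentManager manager) {
            PagedIterable<ConfluentAgreementResource> response
                = manager.marketplaceAgreements().list(Context.NONE);
            // Capture the first element once instead of re-walking the page for each assertion.
            ConfluentAgreementResource first = response.iterator().next();
            Assertions.assertEquals("bth", first.publisher());
            Assertions.assertEquals("tgk", first.product());
            Assertions.assertEquals(true, first.accepted());
        }
    }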
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class MetadataEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - MetadataEntity model = BinaryData.fromString( - "{\"self\":\"x\",\"resource_name\":\"qgtz\",\"created_at\":\"pnqbqqwxrjfe\",\"updated_at\":\"lnwsubisn\",\"deleted_at\":\"mpmngnzscxaqwoo\"}") - .toObject(MetadataEntity.class); - Assertions.assertEquals("x", model.self()); - Assertions.assertEquals("qgtz", model.resourceName()); - Assertions.assertEquals("pnqbqqwxrjfe", model.createdAt()); - Assertions.assertEquals("lnwsubisn", model.updatedAt()); - Assertions.assertEquals("mpmngnzscxaqwoo", model.deletedAt()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - MetadataEntity model = new MetadataEntity().withSelf("x") - .withResourceName("qgtz") - .withCreatedAt("pnqbqqwxrjfe") - .withUpdatedAt("lnwsubisn") - .withDeletedAt("mpmngnzscxaqwoo"); - model = BinaryData.fromObject(model).toObject(MetadataEntity.class); - Assertions.assertEquals("x", model.self()); - Assertions.assertEquals("qgtz", model.resourceName()); - Assertions.assertEquals("pnqbqqwxrjfe", model.createdAt()); - Assertions.assertEquals("lnwsubisn", model.updatedAt()); - Assertions.assertEquals("mpmngnzscxaqwoo", model.deletedAt()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OfferDetailTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OfferDetailTests.java deleted file mode 100644 index 2003e07eba2c..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OfferDetailTests.java +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
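The deleted MetadataEntityTests above also documents the wire format: the JSON keys are snake_case (resource_name, created_at) while the model accessors are camelCase. A short sketch making that mapping explicit (the values are placeholders):

    import com.azure.core.util.BinaryData;
    import com.azure.resourcemanager.confluent.models.MetadataEntity;
    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;

    public final class MetadataEntityWireNameSketch {
        @Test
        public void snakeCaseWireNames() {
            // The payload uses snake_case keys; the model exposes camelCase accessors.
            MetadataEntity model = BinaryData
                .fromString("{\"self\":\"self-link\",\"resource_name\":\"my-resource\",\"created_at\":\"2024-01-01T00:00:00Z\"}")
                .toObject(MetadataEntity.class);
            Assertions.assertEquals("my-resource", model.resourceName());
            Assertions.assertEquals("2024-01-01T00:00:00Z", model.createdAt());
        }
    }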
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.OfferDetail; -import com.azure.resourcemanager.confluent.models.SaaSOfferStatus; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class OfferDetailTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OfferDetail model = BinaryData.fromString( - "{\"publisherId\":\"bhtqqrolfpfpsa\",\"id\":\"gbquxigj\",\"planId\":\"jgzjaoyfhrtx\",\"planName\":\"lnerkujysvleju\",\"termUnit\":\"fqawrlyxw\",\"termId\":\"cpr\",\"privateOfferId\":\"wbxgjvt\",\"privateOfferIds\":[\"ysszdnrujqguh\"],\"status\":\"Suspended\"}") - .toObject(OfferDetail.class); - Assertions.assertEquals("bhtqqrolfpfpsa", model.publisherId()); - Assertions.assertEquals("gbquxigj", model.id()); - Assertions.assertEquals("jgzjaoyfhrtx", model.planId()); - Assertions.assertEquals("lnerkujysvleju", model.planName()); - Assertions.assertEquals("fqawrlyxw", model.termUnit()); - Assertions.assertEquals("cpr", model.termId()); - Assertions.assertEquals("wbxgjvt", model.privateOfferId()); - Assertions.assertEquals("ysszdnrujqguh", model.privateOfferIds().get(0)); - Assertions.assertEquals(SaaSOfferStatus.SUSPENDED, model.status()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OfferDetail model = new OfferDetail().withPublisherId("bhtqqrolfpfpsa") - .withId("gbquxigj") - .withPlanId("jgzjaoyfhrtx") - .withPlanName("lnerkujysvleju") - .withTermUnit("fqawrlyxw") - .withTermId("cpr") - .withPrivateOfferId("wbxgjvt") - .withPrivateOfferIds(Arrays.asList("ysszdnrujqguh")) - .withStatus(SaaSOfferStatus.SUSPENDED); - model = BinaryData.fromObject(model).toObject(OfferDetail.class); - Assertions.assertEquals("bhtqqrolfpfpsa", model.publisherId()); - Assertions.assertEquals("gbquxigj", model.id()); - Assertions.assertEquals("jgzjaoyfhrtx", model.planId()); - Assertions.assertEquals("lnerkujysvleju", model.planName()); - Assertions.assertEquals("fqawrlyxw", model.termUnit()); - Assertions.assertEquals("cpr", model.termId()); - Assertions.assertEquals("wbxgjvt", model.privateOfferId()); - Assertions.assertEquals("ysszdnrujqguh", model.privateOfferIds().get(0)); - Assertions.assertEquals(SaaSOfferStatus.SUSPENDED, model.status()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationDisplayTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationDisplayTests.java deleted file mode 100644 index ae7cf51e4596..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationDisplayTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.OperationDisplay; -import org.junit.jupiter.api.Assertions; - -public final class OperationDisplayTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationDisplay model = BinaryData.fromString( - "{\"provider\":\"ojakhmsbzjhcrze\",\"resource\":\"phlxa\",\"operation\":\"thqt\",\"description\":\"qjbpfzfsin\"}") - .toObject(OperationDisplay.class); - Assertions.assertEquals("ojakhmsbzjhcrze", model.provider()); - Assertions.assertEquals("phlxa", model.resource()); - Assertions.assertEquals("thqt", model.operation()); - Assertions.assertEquals("qjbpfzfsin", model.description()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationDisplay model = new OperationDisplay().withProvider("ojakhmsbzjhcrze") - .withResource("phlxa") - .withOperation("thqt") - .withDescription("qjbpfzfsin"); - model = BinaryData.fromObject(model).toObject(OperationDisplay.class); - Assertions.assertEquals("ojakhmsbzjhcrze", model.provider()); - Assertions.assertEquals("phlxa", model.resource()); - Assertions.assertEquals("thqt", model.operation()); - Assertions.assertEquals("qjbpfzfsin", model.description()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationListResultTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationListResultTests.java deleted file mode 100644 index 2b9de9b3cb65..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationListResultTests.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.OperationResultInner; -import com.azure.resourcemanager.confluent.models.OperationDisplay; -import com.azure.resourcemanager.confluent.models.OperationListResult; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class OperationListResultTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationListResult model = BinaryData.fromString( - "{\"value\":[{\"name\":\"h\",\"display\":{\"provider\":\"yahux\",\"resource\":\"pmqnja\",\"operation\":\"ixjsprozvcputeg\",\"description\":\"wmfdatscmdvpjhul\"},\"isDataAction\":false}],\"nextLink\":\"mkjozkrwf\"}") - .toObject(OperationListResult.class); - Assertions.assertEquals("h", model.value().get(0).name()); - Assertions.assertEquals("yahux", model.value().get(0).display().provider()); - Assertions.assertEquals("pmqnja", model.value().get(0).display().resource()); - Assertions.assertEquals("ixjsprozvcputeg", model.value().get(0).display().operation()); - Assertions.assertEquals("wmfdatscmdvpjhul", model.value().get(0).display().description()); - Assertions.assertEquals(false, model.value().get(0).isDataAction()); - Assertions.assertEquals("mkjozkrwf", model.nextLink()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationListResult model - = new OperationListResult().withValue(Arrays.asList(new OperationResultInner().withName("h") - .withDisplay(new OperationDisplay().withProvider("yahux") - .withResource("pmqnja") - .withOperation("ixjsprozvcputeg") - .withDescription("wmfdatscmdvpjhul")) - .withIsDataAction(false))).withNextLink("mkjozkrwf"); - model = BinaryData.fromObject(model).toObject(OperationListResult.class); - Assertions.assertEquals("h", model.value().get(0).name()); - Assertions.assertEquals("yahux", model.value().get(0).display().provider()); - Assertions.assertEquals("pmqnja", model.value().get(0).display().resource()); - Assertions.assertEquals("ixjsprozvcputeg", model.value().get(0).display().operation()); - Assertions.assertEquals("wmfdatscmdvpjhul", model.value().get(0).display().description()); - Assertions.assertEquals(false, model.value().get(0).isDataAction()); - Assertions.assertEquals("mkjozkrwf", model.nextLink()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationResultInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationResultInnerTests.java deleted file mode 100644 index c15ced8ca0e1..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OperationResultInnerTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.OperationResultInner; -import com.azure.resourcemanager.confluent.models.OperationDisplay; -import org.junit.jupiter.api.Assertions; - -public final class OperationResultInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OperationResultInner model = BinaryData.fromString( - "{\"name\":\"iodjp\",\"display\":{\"provider\":\"ej\",\"resource\":\"vwryoqpso\",\"operation\":\"ctazakljlahbcryf\",\"description\":\"fdosyg\"},\"isDataAction\":true}") - .toObject(OperationResultInner.class); - Assertions.assertEquals("iodjp", model.name()); - Assertions.assertEquals("ej", model.display().provider()); - Assertions.assertEquals("vwryoqpso", model.display().resource()); - Assertions.assertEquals("ctazakljlahbcryf", model.display().operation()); - Assertions.assertEquals("fdosyg", model.display().description()); - Assertions.assertEquals(true, model.isDataAction()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OperationResultInner model = new OperationResultInner().withName("iodjp") - .withDisplay(new OperationDisplay().withProvider("ej") - .withResource("vwryoqpso") - .withOperation("ctazakljlahbcryf") - .withDescription("fdosyg")) - .withIsDataAction(true); - model = BinaryData.fromObject(model).toObject(OperationResultInner.class); - Assertions.assertEquals("iodjp", model.name()); - Assertions.assertEquals("ej", model.display().provider()); - Assertions.assertEquals("vwryoqpso", model.display().resource()); - Assertions.assertEquals("ctazakljlahbcryf", model.display().operation()); - Assertions.assertEquals("fdosyg", model.display().description()); - Assertions.assertEquals(true, model.isDataAction()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationOperationsListMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationOperationsListMockTests.java deleted file mode 100644 index b1795de0fafa..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationOperationsListMockTests.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.OperationResult; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationOperationsListMockTests { - @Test - public void testList() throws Exception { - String responseStr - = "{\"value\":[{\"name\":\"vxwmygd\",\"display\":{\"provider\":\"pqchiszep\",\"resource\":\"bjcrxgibbdaxco\",\"operation\":\"ozauorsukokwb\",\"description\":\"lhlv\"},\"isDataAction\":false}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable response - = manager.organizationOperations().list(com.azure.core.util.Context.NONE); - - Assertions.assertEquals("vxwmygd", response.iterator().next().name()); - Assertions.assertEquals("pqchiszep", response.iterator().next().display().provider()); - Assertions.assertEquals("bjcrxgibbdaxco", response.iterator().next().display().resource()); - Assertions.assertEquals("ozauorsukokwb", response.iterator().next().display().operation()); - Assertions.assertEquals("lhlv", response.iterator().next().display().description()); - Assertions.assertEquals(false, response.iterator().next().isDataAction()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationResourceUpdateTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationResourceUpdateTests.java deleted file mode 100644 index 3f23fe4cae64..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationResourceUpdateTests.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.OrganizationResourceUpdate; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; - -public final class OrganizationResourceUpdateTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - OrganizationResourceUpdate model = BinaryData - .fromString( - "{\"tags\":{\"km\":\"xmnteiwaop\",\"mzidnsezcxtb\":\"jcmmxdcufufsrp\",\"dwzjeiach\":\"sgfyccsnew\"}}") - .toObject(OrganizationResourceUpdate.class); - Assertions.assertEquals("xmnteiwaop", model.tags().get("km")); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - OrganizationResourceUpdate model = new OrganizationResourceUpdate() - .withTags(mapOf("km", "xmnteiwaop", "mzidnsezcxtb", "jcmmxdcufufsrp", "dwzjeiach", "sgfyccsnew")); - model = BinaryData.fromObject(model).toObject(OrganizationResourceUpdate.class); - Assertions.assertEquals("xmnteiwaop", model.tags().get("km")); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static Map mapOf(Object... inputs) { - Map map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsDeleteClusterApiKeyWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsDeleteClusterApiKeyWithResponseMockTests.java deleted file mode 100644 index fa981f802308..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsDeleteClusterApiKeyWithResponseMockTests.java +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
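The deleted OrganizationResourceUpdateTests ships a private varargs mapOf helper, with a comment noting that Map.of could be used where available. On a Java 9+ baseline the same round trip can be written without the helper, for example:

    import com.azure.core.util.BinaryData;
    import com.azure.resourcemanager.confluent.models.OrganizationResourceUpdate;
    import java.util.Map;
    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;

    public final class OrganizationResourceUpdateMapOfSketch {
        @Test
        public void roundTripWithMapOf() {
            // Map.of (Java 9+) builds the tag map directly, replacing the generated varargs helper.
            OrganizationResourceUpdate model = new OrganizationResourceUpdate()
                .withTags(Map.of("km", "xmnteiwaop", "mzidnsezcxtb", "jcmmxdcufufsrp", "dwzjeiach", "sgfyccsnew"));
            model = BinaryData.fromObject(model).toObject(OrganizationResourceUpdate.class);
            Assertions.assertEquals("xmnteiwaop", model.tags().get("km"));
        }
    }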
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsDeleteClusterApiKeyWithResponseMockTests { - @Test - public void testDeleteClusterApiKeyWithResponse() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.organizations() - .deleteClusterApiKeyWithResponse("tilaxh", "fhqlyvi", "ouwivkxoyzunbixx", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsDeleteMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsDeleteMockTests.java deleted file mode 100644 index baa513bc04d5..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsDeleteMockTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsDeleteMockTests { - @Test - public void testDelete() throws Exception { - String responseStr = "{}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - manager.organizations().delete("sspuunnoxyhkx", "qddrihpfhoqcaae", com.azure.core.util.Context.NONE); - - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetClusterByIdWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetClusterByIdWithResponseMockTests.java deleted file mode 100644 index 9877e08270a4..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetClusterByIdWithResponseMockTests.java +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
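The two delete mock tests above only verify that the call completes, because the canned HttpClient discards the request. If asserting on what was sent is also worthwhile, the same lambda can capture the outgoing HttpRequest first; a sketch under that assumption (the resource-group and organization names are placeholders, and the first captured request is assumed to be the initial DELETE):

    import com.azure.core.credential.AccessToken;
    import com.azure.core.http.HttpClient;
    import com.azure.core.http.HttpMethod;
    import com.azure.core.http.HttpRequest;
    import com.azure.core.management.AzureEnvironment;
    import com.azure.core.management.profile.AzureProfile;
    import com.azure.core.test.http.MockHttpResponse;
    import com.azure.resourcemanager.confluent.ConfluentManager;
    import java.nio.charset.StandardCharsets;
    import java.time.OffsetDateTime;
    import java.util.concurrent.atomic.AtomicReference;
    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;
    import reactor.core.publisher.Mono;

    public final class OrganizationsDeleteRequestCaptureSketch {
        @Test
        public void deleteSendsHttpDelete() {
            AtomicReference<HttpRequest> firstRequest = new AtomicReference<>();
            HttpClient httpClient = request -> {
                firstRequest.compareAndSet(null, request); // keep only the first outgoing request
                return Mono.just(new MockHttpResponse(request, 200, "{}".getBytes(StandardCharsets.UTF_8)));
            };
            ConfluentManager manager = ConfluentManager.configure()
                .withHttpClient(httpClient)
                .authenticate(
                    tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)),
                    new AzureProfile("", "", AzureEnvironment.AZURE));

            manager.organizations().delete("myResourceGroup", "myOrganization", com.azure.core.util.Context.NONE);

            // The initial call should go out as an HTTP DELETE against the named organization.
            Assertions.assertEquals(HttpMethod.DELETE, firstRequest.get().getHttpMethod());
            Assertions.assertTrue(firstRequest.get().getUrl().toString().contains("myOrganization"));
        }
    }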
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.SCClusterRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsGetClusterByIdWithResponseMockTests { - @Test - public void testGetClusterByIdWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"bxv\",\"id\":\"htnsi\",\"properties\":{\"metadata\":{\"self\":\"z\",\"resourceName\":\"es\",\"createdTimestamp\":\"dlpagzrcxfail\",\"updatedTimestamp\":\"xwmdboxd\",\"deletedTimestamp\":\"sftufqobrjlna\"},\"spec\":{\"name\":\"ckknhxkizvy\",\"availability\":\"rzvul\",\"cloud\":\"aaeranokqgukk\",\"zone\":\"nvbroylaxx\",\"region\":\"cdisd\",\"kafkaBootstrapEndpoint\":\"fj\",\"httpEndpoint\":\"svgjrwhryvy\",\"apiEndpoint\":\"t\",\"config\":{\"kind\":\"xgccknfnw\"},\"environment\":{\"id\":\"mvpdvjdhttzaef\",\"environment\":\"x\",\"related\":\"ch\",\"resourceName\":\"hk\"},\"network\":{\"id\":\"jdqnsdfzpbgt\",\"environment\":\"ylkdghrje\",\"related\":\"tl\",\"resourceName\":\"ez\"},\"byok\":{\"id\":\"okvbwnhhtqlgehg\",\"related\":\"ipifhpfeoajvg\",\"resourceName\":\"txjcsheafidlt\"}},\"status\":{\"phase\":\"resmkssjhoiftxfk\",\"cku\":182361785}},\"name\":\"prhptillu\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SCClusterRecord response = manager.organizations() - .getClusterByIdWithResponse("zeexavoxtfgle", "dmdqb", "pypqtgsfj", "cbslhhx", - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("bxv", response.kind()); - Assertions.assertEquals("htnsi", response.id()); - Assertions.assertEquals("prhptillu", response.name()); - Assertions.assertEquals("z", response.metadata().self()); - Assertions.assertEquals("es", response.metadata().resourceName()); - Assertions.assertEquals("dlpagzrcxfail", response.metadata().createdTimestamp()); - Assertions.assertEquals("xwmdboxd", response.metadata().updatedTimestamp()); - Assertions.assertEquals("sftufqobrjlna", response.metadata().deletedTimestamp()); - Assertions.assertEquals("ckknhxkizvy", response.spec().name()); - Assertions.assertEquals("rzvul", response.spec().availability()); - Assertions.assertEquals("aaeranokqgukk", response.spec().cloud()); - Assertions.assertEquals("nvbroylaxx", response.spec().zone()); - Assertions.assertEquals("cdisd", response.spec().region()); - Assertions.assertEquals("fj", response.spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("svgjrwhryvy", response.spec().httpEndpoint()); - Assertions.assertEquals("t", response.spec().apiEndpoint()); - Assertions.assertEquals("xgccknfnw", response.spec().config().kind()); - Assertions.assertEquals("mvpdvjdhttzaef", response.spec().environment().id()); - Assertions.assertEquals("x", 
response.spec().environment().environment()); - Assertions.assertEquals("ch", response.spec().environment().related()); - Assertions.assertEquals("hk", response.spec().environment().resourceName()); - Assertions.assertEquals("jdqnsdfzpbgt", response.spec().network().id()); - Assertions.assertEquals("ylkdghrje", response.spec().network().environment()); - Assertions.assertEquals("tl", response.spec().network().related()); - Assertions.assertEquals("ez", response.spec().network().resourceName()); - Assertions.assertEquals("okvbwnhhtqlgehg", response.spec().byok().id()); - Assertions.assertEquals("ipifhpfeoajvg", response.spec().byok().related()); - Assertions.assertEquals("txjcsheafidlt", response.spec().byok().resourceName()); - Assertions.assertEquals("resmkssjhoiftxfk", response.status().phase()); - Assertions.assertEquals(182361785, response.status().cku()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetEnvironmentByIdWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetEnvironmentByIdWithResponseMockTests.java deleted file mode 100644 index d536ef9deebd..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetEnvironmentByIdWithResponseMockTests.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.SCEnvironmentRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsGetEnvironmentByIdWithResponseMockTests { - @Test - public void testGetEnvironmentByIdWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"lds\",\"id\":\"stjbkkdmfl\",\"properties\":{\"metadata\":{\"self\":\"mjlxrrilozapeewc\",\"resourceName\":\"xlktwkuzi\",\"createdTimestamp\":\"slevufuztc\",\"updatedTimestamp\":\"yhjtqedcgzu\",\"deletedTimestamp\":\"mmrqz\"}},\"name\":\"rjvpglydzgkrvqee\"}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SCEnvironmentRecord response = manager.organizations() - .getEnvironmentByIdWithResponse("wzjbh", "zsxjrkambtrneg", "mnvuqe", com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("lds", response.kind()); - Assertions.assertEquals("stjbkkdmfl", response.id()); - Assertions.assertEquals("rjvpglydzgkrvqee", response.name()); - Assertions.assertEquals("mjlxrrilozapeewc", response.metadata().self()); - 
Assertions.assertEquals("xlktwkuzi", response.metadata().resourceName()); - Assertions.assertEquals("slevufuztc", response.metadata().createdTimestamp()); - Assertions.assertEquals("yhjtqedcgzu", response.metadata().updatedTimestamp()); - Assertions.assertEquals("mmrqz", response.metadata().deletedTimestamp()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetSchemaRegistryClusterByIdWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetSchemaRegistryClusterByIdWithResponseMockTests.java deleted file mode 100644 index d5f83aecf713..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsGetSchemaRegistryClusterByIdWithResponseMockTests.java +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsGetSchemaRegistryClusterByIdWithResponseMockTests { - @Test - public void testGetSchemaRegistryClusterByIdWithResponse() throws Exception { - String responseStr - = "{\"kind\":\"vjskgfmoc\",\"id\":\"hpqgatjeaahhvj\",\"properties\":{\"metadata\":{\"self\":\"kzyb\",\"resourceName\":\"jid\",\"createdTimestamp\":\"syxkyxvxevblb\",\"updatedTimestamp\":\"dnlj\",\"deletedTimestamp\":\"geuaulx\"},\"spec\":{\"name\":\"mjbnk\",\"httpEndpoint\":\"xynenl\",\"package\":\"xeizzg\",\"region\":{\"id\":\"nsrmffeycx\",\"related\":\"tpiymerteea\",\"resourceName\":\"xqiekkkzddrtk\"},\"environment\":{\"id\":\"jbmxvavre\",\"related\":\"eesvecu\",\"resourceName\":\"pxtxsuwp\"},\"cloud\":\"ujwsawddjibabxvi\"},\"status\":{\"phase\":\"v\"}}}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - SchemaRegistryClusterRecord response = manager.organizations() - .getSchemaRegistryClusterByIdWithResponse("ajuwas", "vdaeyyguxakjsq", "hzbezkgi", "sidxasicdd", - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("vjskgfmoc", response.kind()); - Assertions.assertEquals("hpqgatjeaahhvj", response.id()); - Assertions.assertEquals("kzyb", response.metadata().self()); - Assertions.assertEquals("jid", response.metadata().resourceName()); - Assertions.assertEquals("syxkyxvxevblb", response.metadata().createdTimestamp()); - Assertions.assertEquals("dnlj", response.metadata().updatedTimestamp()); - 
Assertions.assertEquals("geuaulx", response.metadata().deletedTimestamp()); - Assertions.assertEquals("mjbnk", response.spec().name()); - Assertions.assertEquals("xynenl", response.spec().httpEndpoint()); - Assertions.assertEquals("xeizzg", response.spec().packageProperty()); - Assertions.assertEquals("nsrmffeycx", response.spec().region().id()); - Assertions.assertEquals("tpiymerteea", response.spec().region().related()); - Assertions.assertEquals("xqiekkkzddrtk", response.spec().region().resourceName()); - Assertions.assertEquals("jbmxvavre", response.spec().environment().id()); - Assertions.assertEquals("eesvecu", response.spec().environment().related()); - Assertions.assertEquals("pxtxsuwp", response.spec().environment().resourceName()); - Assertions.assertEquals("ujwsawddjibabxvi", response.spec().cloud()); - Assertions.assertEquals("v", response.status().phase()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListClustersMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListClustersMockTests.java deleted file mode 100644 index c5f623212c65..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListClustersMockTests.java +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.SCClusterRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsListClustersMockTests { - @Test - public void testListClusters() throws Exception { - String responseStr - = "{\"value\":[{\"kind\":\"dflgzuri\",\"id\":\"aecxndtic\",\"properties\":{\"metadata\":{\"self\":\"zmlqtmldgxo\",\"resourceName\":\"irclnpk\",\"createdTimestamp\":\"ayzri\",\"updatedTimestamp\":\"hya\",\"deletedTimestamp\":\"vjlboxqvk\"},\"spec\":{\"name\":\"xhom\",\"availability\":\"nhdwdigumbnra\",\"cloud\":\"zzp\",\"zone\":\"a\",\"region\":\"sdzhezww\",\"kafkaBootstrapEndpoint\":\"iqyuvvfo\",\"httpEndpoint\":\"p\",\"apiEndpoint\":\"qyikvy\",\"config\":{\"kind\":\"yavluwmncstt\"},\"environment\":{\"id\":\"y\",\"environment\":\"poekrsgsgb\",\"related\":\"uzqgnjdgkynsc\",\"resourceName\":\"qhzvhxnkomt\"},\"network\":{\"id\":\"otppnv\",\"environment\":\"zxhi\",\"related\":\"rbbcevq\",\"resourceName\":\"tltdhlfkqojpy\"},\"byok\":{\"id\":\"trdcnifmzzs\",\"related\":\"m\",\"resourceName\":\"nysuxmprafwgckh\"}},\"status\":{\"phase\":\"vdff\",\"cku\":848051352}},\"name\":\"qrouda\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - 
.authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable<SCClusterRecord> response = manager.organizations() - .listClusters("toepryu", "nwy", "pzdm", 1383258036, "zvfvaawz", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("dflgzuri", response.iterator().next().kind()); - Assertions.assertEquals("aecxndtic", response.iterator().next().id()); - Assertions.assertEquals("qrouda", response.iterator().next().name()); - Assertions.assertEquals("zmlqtmldgxo", response.iterator().next().metadata().self()); - Assertions.assertEquals("irclnpk", response.iterator().next().metadata().resourceName()); - Assertions.assertEquals("ayzri", response.iterator().next().metadata().createdTimestamp()); - Assertions.assertEquals("hya", response.iterator().next().metadata().updatedTimestamp()); - Assertions.assertEquals("vjlboxqvk", response.iterator().next().metadata().deletedTimestamp()); - Assertions.assertEquals("xhom", response.iterator().next().spec().name()); - Assertions.assertEquals("nhdwdigumbnra", response.iterator().next().spec().availability()); - Assertions.assertEquals("zzp", response.iterator().next().spec().cloud()); - Assertions.assertEquals("a", response.iterator().next().spec().zone()); - Assertions.assertEquals("sdzhezww", response.iterator().next().spec().region()); - Assertions.assertEquals("iqyuvvfo", response.iterator().next().spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("p", response.iterator().next().spec().httpEndpoint()); - Assertions.assertEquals("qyikvy", response.iterator().next().spec().apiEndpoint()); - Assertions.assertEquals("yavluwmncstt", response.iterator().next().spec().config().kind()); - Assertions.assertEquals("y", response.iterator().next().spec().environment().id()); - Assertions.assertEquals("poekrsgsgb", response.iterator().next().spec().environment().environment()); - Assertions.assertEquals("uzqgnjdgkynsc", response.iterator().next().spec().environment().related()); - Assertions.assertEquals("qhzvhxnkomt", response.iterator().next().spec().environment().resourceName()); - Assertions.assertEquals("otppnv", response.iterator().next().spec().network().id()); - Assertions.assertEquals("zxhi", response.iterator().next().spec().network().environment()); - Assertions.assertEquals("rbbcevq", response.iterator().next().spec().network().related()); - Assertions.assertEquals("tltdhlfkqojpy", response.iterator().next().spec().network().resourceName()); - Assertions.assertEquals("trdcnifmzzs", response.iterator().next().spec().byok().id()); - Assertions.assertEquals("m", response.iterator().next().spec().byok().related()); - Assertions.assertEquals("nysuxmprafwgckh", response.iterator().next().spec().byok().resourceName()); - Assertions.assertEquals("vdff", response.iterator().next().status().phase()); - Assertions.assertEquals(848051352, response.iterator().next().status().cku()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListEnvironmentsMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListEnvironmentsMockTests.java deleted file mode 100644 index 79c92ac50069..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListEnvironmentsMockTests.java +++ /dev/null @@ -1,46 +0,0 @@ -//
Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.SCEnvironmentRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsListEnvironmentsMockTests { - @Test - public void testListEnvironments() throws Exception { - String responseStr - = "{\"value\":[{\"kind\":\"kdncj\",\"id\":\"onbzoggculapzwy\",\"properties\":{\"metadata\":{\"self\":\"gtqxep\",\"resourceName\":\"lbfu\",\"createdTimestamp\":\"lyjt\",\"updatedTimestamp\":\"of\",\"deletedTimestamp\":\"hvfcibyfmow\"}},\"name\":\"rkjpvdwxfzwii\"}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable<SCEnvironmentRecord> response = manager.organizations() - .listEnvironments("dao", "djvlpj", 945284283, "kzbrmsgeivsiy", com.azure.core.util.Context.NONE); - - Assertions.assertEquals("kdncj", response.iterator().next().kind()); - Assertions.assertEquals("onbzoggculapzwy", response.iterator().next().id()); - Assertions.assertEquals("rkjpvdwxfzwii", response.iterator().next().name()); - Assertions.assertEquals("gtqxep", response.iterator().next().metadata().self()); - Assertions.assertEquals("lbfu", response.iterator().next().metadata().resourceName()); - Assertions.assertEquals("lyjt", response.iterator().next().metadata().createdTimestamp()); - Assertions.assertEquals("of", response.iterator().next().metadata().updatedTimestamp()); - Assertions.assertEquals("hvfcibyfmow", response.iterator().next().metadata().deletedTimestamp()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListRegionsWithResponseMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListRegionsWithResponseMockTests.java deleted file mode 100644 index bbf7e18ad08a..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListRegionsWithResponseMockTests.java +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator.
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.ListAccessRequestModel; -import com.azure.resourcemanager.confluent.models.ListRegionsSuccessResponse; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsListRegionsWithResponseMockTests { - @Test - public void testListRegionsWithResponse() throws Exception { - String responseStr - = "{\"data\":[{\"kind\":\"vekqvgqo\",\"id\":\"ifzmpjwyivqi\",\"properties\":{\"metadata\":{\"self\":\"vhrfsphuagrt\",\"resourceName\":\"kteusqczk\",\"createdTimestamp\":\"klxubyja\",\"updatedTimestamp\":\"mmfblcqcuubgqib\",\"deletedTimestamp\":\"a\"},\"spec\":{\"name\":\"tttwgdslqxih\",\"cloud\":\"moo\",\"regionName\":\"qseypxiutcxa\",\"packages\":[\"y\",\"petogebjox\",\"lhvnhlab\",\"q\"]}}},{\"kind\":\"kzjcjbtrgae\",\"id\":\"vibr\",\"properties\":{\"metadata\":{\"self\":\"toqbeitpkxztmoob\",\"resourceName\":\"ft\",\"createdTimestamp\":\"gfcwqmpimaqxzhem\",\"updatedTimestamp\":\"h\",\"deletedTimestamp\":\"uj\"},\"spec\":{\"name\":\"wkozz\",\"cloud\":\"ulkb\",\"regionName\":\"pfajnjwltlwtjj\",\"packages\":[\"talhsnvkcdmxzr\",\"oaimlnw\",\"aaomylweazu\",\"cse\"]}}},{\"kind\":\"wwnpj\",\"id\":\"fz\",\"properties\":{\"metadata\":{\"self\":\"hwahfbousn\",\"resourceName\":\"pgfewetwlyx\",\"createdTimestamp\":\"cxy\",\"updatedTimestamp\":\"hdjhlimmbcx\",\"deletedTimestamp\":\"bcporxvxcjzhqizx\"},\"spec\":{\"name\":\"tgqscjavftjuh\",\"cloud\":\"azkmtgguwp\",\"regionName\":\"r\",\"packages\":[\"ivmmghfcfiwrxgk\",\"euvyinzqodfvpgs\"]}}}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - ListRegionsSuccessResponse response = manager.organizations() - .listRegionsWithResponse("cuplcplcwkhih", "hlhzdsqtzbsrgno", - new ListAccessRequestModel().withSearchFilters(mapOf("oteyowc", "hfgmvecactxm")), - com.azure.core.util.Context.NONE) - .getValue(); - - Assertions.assertEquals("vekqvgqo", response.data().get(0).kind()); - Assertions.assertEquals("ifzmpjwyivqi", response.data().get(0).id()); - Assertions.assertEquals("vhrfsphuagrt", response.data().get(0).metadata().self()); - Assertions.assertEquals("kteusqczk", response.data().get(0).metadata().resourceName()); - Assertions.assertEquals("klxubyja", response.data().get(0).metadata().createdTimestamp()); - Assertions.assertEquals("mmfblcqcuubgqib", response.data().get(0).metadata().updatedTimestamp()); - Assertions.assertEquals("a", response.data().get(0).metadata().deletedTimestamp()); - Assertions.assertEquals("tttwgdslqxih", response.data().get(0).spec().name()); - Assertions.assertEquals("moo", response.data().get(0).spec().cloud()); - Assertions.assertEquals("qseypxiutcxa", 
response.data().get(0).spec().regionName()); - Assertions.assertEquals("y", response.data().get(0).spec().packages().get(0)); - } - - // Use "Map.of" if available - @SuppressWarnings("unchecked") - private static <T> Map<String, T> mapOf(Object... inputs) { - Map<String, T> map = new HashMap<>(); - for (int i = 0; i < inputs.length; i += 2) { - String key = (String) inputs[i]; - T value = (T) inputs[i + 1]; - map.put(key, value); - } - return map; - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListSchemaRegistryClustersMockTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListSchemaRegistryClustersMockTests.java deleted file mode 100644 index 1a4db9aaebb9..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/OrganizationsListSchemaRegistryClustersMockTests.java +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.credential.AccessToken; -import com.azure.core.http.HttpClient; -import com.azure.core.http.rest.PagedIterable; -import com.azure.core.management.AzureEnvironment; -import com.azure.core.management.profile.AzureProfile; -import com.azure.core.test.http.MockHttpResponse; -import com.azure.resourcemanager.confluent.ConfluentManager; -import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterRecord; -import java.nio.charset.StandardCharsets; -import java.time.OffsetDateTime; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import reactor.core.publisher.Mono; - -public final class OrganizationsListSchemaRegistryClustersMockTests { - @Test - public void testListSchemaRegistryClusters() throws Exception { - String responseStr - = "{\"value\":[{\"kind\":\"fdn\",\"id\":\"ydvfvfcjnae\",\"properties\":{\"metadata\":{\"self\":\"vhmgorffukis\",\"resourceName\":\"w\",\"createdTimestamp\":\"hwplefaxvx\",\"updatedTimestamp\":\"cbtgnhnz\",\"deletedTimestamp\":\"qxtjjfzqlqhyca\"},\"spec\":{\"name\":\"ggxdb\",\"httpEndpoint\":\"smieknlra\",\"package\":\"aawiuagydwqfb\",\"region\":{\"id\":\"rfgi\",\"related\":\"tcojocqwo\",\"resourceName\":\"nzjvusfzldm\"},\"environment\":{\"id\":\"xylfsb\",\"related\":\"adpysownbt\",\"resourceName\":\"bugrj\"},\"cloud\":\"to\"},\"status\":{\"phase\":\"isofieypefojyqd\"}}}]}"; - - HttpClient httpClient - = response -> Mono.just(new MockHttpResponse(response, 200, responseStr.getBytes(StandardCharsets.UTF_8))); - ConfluentManager manager = ConfluentManager.configure() - .withHttpClient(httpClient) - .authenticate(tokenRequestContext -> Mono.just(new AccessToken("this_is_a_token", OffsetDateTime.MAX)), - new AzureProfile("", "", AzureEnvironment.AZURE)); - - PagedIterable<SchemaRegistryClusterRecord> response = manager.organizations() - .listSchemaRegistryClusters("pavehhr", "kbunzoz", "dhcxgkmoy", 1670062911, "dyuib", - com.azure.core.util.Context.NONE); - - Assertions.assertEquals("fdn", response.iterator().next().kind()); - Assertions.assertEquals("ydvfvfcjnae", response.iterator().next().id()); - Assertions.assertEquals("vhmgorffukis", response.iterator().next().metadata().self()); - Assertions.assertEquals("w", response.iterator().next().metadata().resourceName()); - Assertions.assertEquals("hwplefaxvx",
response.iterator().next().metadata().createdTimestamp()); - Assertions.assertEquals("cbtgnhnz", response.iterator().next().metadata().updatedTimestamp()); - Assertions.assertEquals("qxtjjfzqlqhyca", response.iterator().next().metadata().deletedTimestamp()); - Assertions.assertEquals("ggxdb", response.iterator().next().spec().name()); - Assertions.assertEquals("smieknlra", response.iterator().next().spec().httpEndpoint()); - Assertions.assertEquals("aawiuagydwqfb", response.iterator().next().spec().packageProperty()); - Assertions.assertEquals("rfgi", response.iterator().next().spec().region().id()); - Assertions.assertEquals("tcojocqwo", response.iterator().next().spec().region().related()); - Assertions.assertEquals("nzjvusfzldm", response.iterator().next().spec().region().resourceName()); - Assertions.assertEquals("xylfsb", response.iterator().next().spec().environment().id()); - Assertions.assertEquals("adpysownbt", response.iterator().next().spec().environment().related()); - Assertions.assertEquals("bugrj", response.iterator().next().spec().environment().resourceName()); - Assertions.assertEquals("to", response.iterator().next().spec().cloud()); - Assertions.assertEquals("isofieypefojyqd", response.iterator().next().status().phase()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionPropertiesTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionPropertiesTests.java deleted file mode 100644 index 72412f818373..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionPropertiesTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.RegionProperties; -import com.azure.resourcemanager.confluent.models.RegionSpecEntity; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class RegionPropertiesTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RegionProperties model = BinaryData.fromString( - "{\"metadata\":{\"self\":\"uyovw\",\"resourceName\":\"bkfezzxscyhwzdgi\",\"createdTimestamp\":\"jbzbomvzzbtdcq\",\"updatedTimestamp\":\"niyujv\",\"deletedTimestamp\":\"l\"},\"spec\":{\"name\":\"hfssnrb\",\"cloud\":\"efr\",\"regionName\":\"sgaojfmwncot\",\"packages\":[\"hirctymoxoftpipi\"]}}") - .toObject(RegionProperties.class); - Assertions.assertEquals("uyovw", model.metadata().self()); - Assertions.assertEquals("bkfezzxscyhwzdgi", model.metadata().resourceName()); - Assertions.assertEquals("jbzbomvzzbtdcq", model.metadata().createdTimestamp()); - Assertions.assertEquals("niyujv", model.metadata().updatedTimestamp()); - Assertions.assertEquals("l", model.metadata().deletedTimestamp()); - Assertions.assertEquals("hfssnrb", model.spec().name()); - Assertions.assertEquals("efr", model.spec().cloud()); - Assertions.assertEquals("sgaojfmwncot", model.spec().regionName()); - Assertions.assertEquals("hirctymoxoftpipi", model.spec().packages().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RegionProperties model = new RegionProperties() - .withMetadata(new SCMetadataEntity().withSelf("uyovw") - .withResourceName("bkfezzxscyhwzdgi") - .withCreatedTimestamp("jbzbomvzzbtdcq") - .withUpdatedTimestamp("niyujv") - .withDeletedTimestamp("l")) - .withSpec(new RegionSpecEntity().withName("hfssnrb") - .withCloud("efr") - .withRegionName("sgaojfmwncot") - .withPackages(Arrays.asList("hirctymoxoftpipi"))); - model = BinaryData.fromObject(model).toObject(RegionProperties.class); - Assertions.assertEquals("uyovw", model.metadata().self()); - Assertions.assertEquals("bkfezzxscyhwzdgi", model.metadata().resourceName()); - Assertions.assertEquals("jbzbomvzzbtdcq", model.metadata().createdTimestamp()); - Assertions.assertEquals("niyujv", model.metadata().updatedTimestamp()); - Assertions.assertEquals("l", model.metadata().deletedTimestamp()); - Assertions.assertEquals("hfssnrb", model.spec().name()); - Assertions.assertEquals("efr", model.spec().cloud()); - Assertions.assertEquals("sgaojfmwncot", model.spec().regionName()); - Assertions.assertEquals("hirctymoxoftpipi", model.spec().packages().get(0)); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionRecordTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionRecordTests.java deleted file mode 100644 index 2be38ded407f..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionRecordTests.java +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.RegionRecord; -import com.azure.resourcemanager.confluent.models.RegionSpecEntity; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class RegionRecordTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RegionRecord model = BinaryData.fromString( - "{\"kind\":\"qxvcxgfrpdsofb\",\"id\":\"rnsvbuswd\",\"properties\":{\"metadata\":{\"self\":\"bycnunvjsrtkf\",\"resourceName\":\"nopqgikyzirtx\",\"createdTimestamp\":\"uxzejntpsew\",\"updatedTimestamp\":\"oi\",\"deletedTimestamp\":\"ukry\"},\"spec\":{\"name\":\"qmi\",\"cloud\":\"xorgg\",\"regionName\":\"hyaomtbghhavgr\",\"packages\":[\"fo\",\"jzhpjbibgjmfx\",\"mv\"]}}}") - .toObject(RegionRecord.class); - Assertions.assertEquals("qxvcxgfrpdsofb", model.kind()); - Assertions.assertEquals("rnsvbuswd", model.id()); - Assertions.assertEquals("bycnunvjsrtkf", model.metadata().self()); - Assertions.assertEquals("nopqgikyzirtx", model.metadata().resourceName()); - Assertions.assertEquals("uxzejntpsew", model.metadata().createdTimestamp()); - Assertions.assertEquals("oi", model.metadata().updatedTimestamp()); - Assertions.assertEquals("ukry", model.metadata().deletedTimestamp()); - Assertions.assertEquals("qmi", model.spec().name()); - Assertions.assertEquals("xorgg", model.spec().cloud()); - Assertions.assertEquals("hyaomtbghhavgr", model.spec().regionName()); - Assertions.assertEquals("fo", model.spec().packages().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RegionRecord model = new RegionRecord().withKind("qxvcxgfrpdsofb") - .withId("rnsvbuswd") - .withMetadata(new SCMetadataEntity().withSelf("bycnunvjsrtkf") - .withResourceName("nopqgikyzirtx") - .withCreatedTimestamp("uxzejntpsew") - .withUpdatedTimestamp("oi") - .withDeletedTimestamp("ukry")) - .withSpec(new RegionSpecEntity().withName("qmi") - .withCloud("xorgg") - .withRegionName("hyaomtbghhavgr") - .withPackages(Arrays.asList("fo", "jzhpjbibgjmfx", "mv"))); - model = BinaryData.fromObject(model).toObject(RegionRecord.class); - Assertions.assertEquals("qxvcxgfrpdsofb", model.kind()); - Assertions.assertEquals("rnsvbuswd", model.id()); - Assertions.assertEquals("bycnunvjsrtkf", model.metadata().self()); - Assertions.assertEquals("nopqgikyzirtx", model.metadata().resourceName()); - Assertions.assertEquals("uxzejntpsew", model.metadata().createdTimestamp()); - Assertions.assertEquals("oi", model.metadata().updatedTimestamp()); - Assertions.assertEquals("ukry", model.metadata().deletedTimestamp()); - Assertions.assertEquals("qmi", model.spec().name()); - Assertions.assertEquals("xorgg", model.spec().cloud()); - Assertions.assertEquals("hyaomtbghhavgr", model.spec().regionName()); - Assertions.assertEquals("fo", model.spec().packages().get(0)); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionSpecEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionSpecEntityTests.java deleted file mode 100644 index 741b2c7f5f4a..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RegionSpecEntityTests.java +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 
(c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.RegionSpecEntity; -import java.util.Arrays; -import org.junit.jupiter.api.Assertions; - -public final class RegionSpecEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RegionSpecEntity model = BinaryData.fromString( - "{\"name\":\"czuhxacpqjlihh\",\"cloud\":\"spskasdvlmfwdgz\",\"regionName\":\"lucvpam\",\"packages\":[\"euzvx\",\"risjnhnytxifqjz\",\"xmrhu\"]}") - .toObject(RegionSpecEntity.class); - Assertions.assertEquals("czuhxacpqjlihh", model.name()); - Assertions.assertEquals("spskasdvlmfwdgz", model.cloud()); - Assertions.assertEquals("lucvpam", model.regionName()); - Assertions.assertEquals("euzvx", model.packages().get(0)); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RegionSpecEntity model = new RegionSpecEntity().withName("czuhxacpqjlihh") - .withCloud("spskasdvlmfwdgz") - .withRegionName("lucvpam") - .withPackages(Arrays.asList("euzvx", "risjnhnytxifqjz", "xmrhu")); - model = BinaryData.fromObject(model).toObject(RegionSpecEntity.class); - Assertions.assertEquals("czuhxacpqjlihh", model.name()); - Assertions.assertEquals("spskasdvlmfwdgz", model.cloud()); - Assertions.assertEquals("lucvpam", model.regionName()); - Assertions.assertEquals("euzvx", model.packages().get(0)); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RoleBindingRecordTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RoleBindingRecordTests.java deleted file mode 100644 index 9dc0ebf0fe52..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/RoleBindingRecordTests.java +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.MetadataEntity; -import com.azure.resourcemanager.confluent.models.RoleBindingRecord; -import org.junit.jupiter.api.Assertions; - -public final class RoleBindingRecordTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - RoleBindingRecord model = BinaryData.fromString( - "{\"kind\":\"vhelxprglyatdd\",\"id\":\"cbcuejrjxgciqi\",\"metadata\":{\"self\":\"osx\",\"resource_name\":\"qrhzoymibmrqyib\",\"created_at\":\"wfluszdt\",\"updated_at\":\"rkwofyyvoqa\",\"deleted_at\":\"iexpbtgiwbwo\"},\"principal\":\"washr\",\"role_name\":\"tkcnqxwb\",\"crn_pattern\":\"kulpiujwaasi\"}") - .toObject(RoleBindingRecord.class); - Assertions.assertEquals("vhelxprglyatdd", model.kind()); - Assertions.assertEquals("cbcuejrjxgciqi", model.id()); - Assertions.assertEquals("osx", model.metadata().self()); - Assertions.assertEquals("qrhzoymibmrqyib", model.metadata().resourceName()); - Assertions.assertEquals("wfluszdt", model.metadata().createdAt()); - Assertions.assertEquals("rkwofyyvoqa", model.metadata().updatedAt()); - Assertions.assertEquals("iexpbtgiwbwo", model.metadata().deletedAt()); - Assertions.assertEquals("washr", model.principal()); - Assertions.assertEquals("tkcnqxwb", model.roleName()); - Assertions.assertEquals("kulpiujwaasi", model.crnPattern()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - RoleBindingRecord model = new RoleBindingRecord().withKind("vhelxprglyatdd") - .withId("cbcuejrjxgciqi") - .withMetadata(new MetadataEntity().withSelf("osx") - .withResourceName("qrhzoymibmrqyib") - .withCreatedAt("wfluszdt") - .withUpdatedAt("rkwofyyvoqa") - .withDeletedAt("iexpbtgiwbwo")) - .withPrincipal("washr") - .withRoleName("tkcnqxwb") - .withCrnPattern("kulpiujwaasi"); - model = BinaryData.fromObject(model).toObject(RoleBindingRecord.class); - Assertions.assertEquals("vhelxprglyatdd", model.kind()); - Assertions.assertEquals("cbcuejrjxgciqi", model.id()); - Assertions.assertEquals("osx", model.metadata().self()); - Assertions.assertEquals("qrhzoymibmrqyib", model.metadata().resourceName()); - Assertions.assertEquals("wfluszdt", model.metadata().createdAt()); - Assertions.assertEquals("rkwofyyvoqa", model.metadata().updatedAt()); - Assertions.assertEquals("iexpbtgiwbwo", model.metadata().deletedAt()); - Assertions.assertEquals("washr", model.principal()); - Assertions.assertEquals("tkcnqxwb", model.roleName()); - Assertions.assertEquals("kulpiujwaasi", model.crnPattern()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterByokEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterByokEntityTests.java deleted file mode 100644 index 399d3e5c8097..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterByokEntityTests.java +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.SCClusterByokEntity; -import org.junit.jupiter.api.Assertions; - -public final class SCClusterByokEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SCClusterByokEntity model - = BinaryData.fromString("{\"id\":\"hmtnvy\",\"related\":\"atkzwpcnpw\",\"resourceName\":\"jaesgvvsccya\"}") - .toObject(SCClusterByokEntity.class); - Assertions.assertEquals("hmtnvy", model.id()); - Assertions.assertEquals("atkzwpcnpw", model.related()); - Assertions.assertEquals("jaesgvvsccya", model.resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SCClusterByokEntity model - = new SCClusterByokEntity().withId("hmtnvy").withRelated("atkzwpcnpw").withResourceName("jaesgvvsccya"); - model = BinaryData.fromObject(model).toObject(SCClusterByokEntity.class); - Assertions.assertEquals("hmtnvy", model.id()); - Assertions.assertEquals("atkzwpcnpw", model.related()); - Assertions.assertEquals("jaesgvvsccya", model.resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterNetworkEnvironmentEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterNetworkEnvironmentEntityTests.java deleted file mode 100644 index 8423f8be60c6..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterNetworkEnvironmentEntityTests.java +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity; -import org.junit.jupiter.api.Assertions; - -public final class SCClusterNetworkEnvironmentEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SCClusterNetworkEnvironmentEntity model = BinaryData.fromString( - "{\"id\":\"zdmsyqtfi\",\"environment\":\"hbotzingamvppho\",\"related\":\"qzudphq\",\"resourceName\":\"vdkfwynwcvtbvk\"}") - .toObject(SCClusterNetworkEnvironmentEntity.class); - Assertions.assertEquals("zdmsyqtfi", model.id()); - Assertions.assertEquals("hbotzingamvppho", model.environment()); - Assertions.assertEquals("qzudphq", model.related()); - Assertions.assertEquals("vdkfwynwcvtbvk", model.resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SCClusterNetworkEnvironmentEntity model = new SCClusterNetworkEnvironmentEntity().withId("zdmsyqtfi") - .withEnvironment("hbotzingamvppho") - .withRelated("qzudphq") - .withResourceName("vdkfwynwcvtbvk"); - model = BinaryData.fromObject(model).toObject(SCClusterNetworkEnvironmentEntity.class); - Assertions.assertEquals("zdmsyqtfi", model.id()); - Assertions.assertEquals("hbotzingamvppho", model.environment()); - Assertions.assertEquals("qzudphq", model.related()); - Assertions.assertEquals("vdkfwynwcvtbvk", model.resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterRecordInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterRecordInnerTests.java deleted file mode 100644 index b9b6ea8b2f0e..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterRecordInnerTests.java +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.SCClusterRecordInner; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import com.azure.resourcemanager.confluent.models.ClusterStatusEntity; -import com.azure.resourcemanager.confluent.models.SCClusterByokEntity; -import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity; -import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class SCClusterRecordInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SCClusterRecordInner model = BinaryData.fromString( - "{\"kind\":\"mnguxawqaldsyu\",\"id\":\"imerqfobwyznk\",\"properties\":{\"metadata\":{\"self\":\"t\",\"resourceName\":\"fhpagmhrskdsnf\",\"createdTimestamp\":\"doakgtdlmkkzevdl\",\"updatedTimestamp\":\"wpusdsttwvogv\",\"deletedTimestamp\":\"ejdcngqqmoakuf\"},\"spec\":{\"name\":\"zr\",\"availability\":\"dgrtwaenuuzkopbm\",\"cloud\":\"rfdwoyu\",\"zone\":\"ziuiefozbhdm\",\"region\":\"l\",\"kafkaBootstrapEndpoint\":\"qhoftrmaequiah\",\"httpEndpoint\":\"cslfaoqzpiyylha\",\"apiEndpoint\":\"swhccsphk\",\"config\":{\"kind\":\"witqscywuggwoluh\"},\"environment\":{\"id\":\"wem\",\"environment\":\"i\",\"related\":\"brgz\",\"resourceName\":\"msweypqwdxggicc\"},\"network\":{\"id\":\"qhuexm\",\"environment\":\"tlstvlzywem\",\"related\":\"rncsdtclu\",\"resourceName\":\"ypbsfgytguslfead\"},\"byok\":{\"id\":\"qukyhejhzi\",\"related\":\"gfpelolppvksrpqv\",\"resourceName\":\"zraehtwd\"}},\"status\":{\"phase\":\"tswiby\",\"cku\":403427600}},\"name\":\"bhshfwpracstwity\"}") - .toObject(SCClusterRecordInner.class); - Assertions.assertEquals("mnguxawqaldsyu", model.kind()); - Assertions.assertEquals("imerqfobwyznk", model.id()); - Assertions.assertEquals("bhshfwpracstwity", model.name()); - Assertions.assertEquals("t", model.metadata().self()); - Assertions.assertEquals("fhpagmhrskdsnf", model.metadata().resourceName()); - Assertions.assertEquals("doakgtdlmkkzevdl", model.metadata().createdTimestamp()); - Assertions.assertEquals("wpusdsttwvogv", model.metadata().updatedTimestamp()); - Assertions.assertEquals("ejdcngqqmoakuf", model.metadata().deletedTimestamp()); - Assertions.assertEquals("zr", model.spec().name()); - Assertions.assertEquals("dgrtwaenuuzkopbm", model.spec().availability()); - Assertions.assertEquals("rfdwoyu", model.spec().cloud()); - Assertions.assertEquals("ziuiefozbhdm", model.spec().zone()); - Assertions.assertEquals("l", model.spec().region()); - Assertions.assertEquals("qhoftrmaequiah", model.spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("cslfaoqzpiyylha", model.spec().httpEndpoint()); - Assertions.assertEquals("swhccsphk", model.spec().apiEndpoint()); - Assertions.assertEquals("witqscywuggwoluh", model.spec().config().kind()); - Assertions.assertEquals("wem", model.spec().environment().id()); - Assertions.assertEquals("i", model.spec().environment().environment()); - Assertions.assertEquals("brgz", model.spec().environment().related()); - Assertions.assertEquals("msweypqwdxggicc", model.spec().environment().resourceName()); - Assertions.assertEquals("qhuexm", model.spec().network().id()); - Assertions.assertEquals("tlstvlzywem", model.spec().network().environment()); - Assertions.assertEquals("rncsdtclu", model.spec().network().related()); 
- Assertions.assertEquals("ypbsfgytguslfead", model.spec().network().resourceName()); - Assertions.assertEquals("qukyhejhzi", model.spec().byok().id()); - Assertions.assertEquals("gfpelolppvksrpqv", model.spec().byok().related()); - Assertions.assertEquals("zraehtwd", model.spec().byok().resourceName()); - Assertions.assertEquals("tswiby", model.status().phase()); - Assertions.assertEquals(403427600, model.status().cku()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SCClusterRecordInner model = new SCClusterRecordInner().withKind("mnguxawqaldsyu") - .withId("imerqfobwyznk") - .withName("bhshfwpracstwity") - .withMetadata(new SCMetadataEntity().withSelf("t") - .withResourceName("fhpagmhrskdsnf") - .withCreatedTimestamp("doakgtdlmkkzevdl") - .withUpdatedTimestamp("wpusdsttwvogv") - .withDeletedTimestamp("ejdcngqqmoakuf")) - .withSpec(new SCClusterSpecEntity().withName("zr") - .withAvailability("dgrtwaenuuzkopbm") - .withCloud("rfdwoyu") - .withZone("ziuiefozbhdm") - .withRegion("l") - .withKafkaBootstrapEndpoint("qhoftrmaequiah") - .withHttpEndpoint("cslfaoqzpiyylha") - .withApiEndpoint("swhccsphk") - .withConfig(new ClusterConfigEntity().withKind("witqscywuggwoluh")) - .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("wem") - .withEnvironment("i") - .withRelated("brgz") - .withResourceName("msweypqwdxggicc")) - .withNetwork(new SCClusterNetworkEnvironmentEntity().withId("qhuexm") - .withEnvironment("tlstvlzywem") - .withRelated("rncsdtclu") - .withResourceName("ypbsfgytguslfead")) - .withByok(new SCClusterByokEntity().withId("qukyhejhzi") - .withRelated("gfpelolppvksrpqv") - .withResourceName("zraehtwd"))) - .withStatus(new ClusterStatusEntity().withPhase("tswiby").withCku(403427600)); - model = BinaryData.fromObject(model).toObject(SCClusterRecordInner.class); - Assertions.assertEquals("mnguxawqaldsyu", model.kind()); - Assertions.assertEquals("imerqfobwyznk", model.id()); - Assertions.assertEquals("bhshfwpracstwity", model.name()); - Assertions.assertEquals("t", model.metadata().self()); - Assertions.assertEquals("fhpagmhrskdsnf", model.metadata().resourceName()); - Assertions.assertEquals("doakgtdlmkkzevdl", model.metadata().createdTimestamp()); - Assertions.assertEquals("wpusdsttwvogv", model.metadata().updatedTimestamp()); - Assertions.assertEquals("ejdcngqqmoakuf", model.metadata().deletedTimestamp()); - Assertions.assertEquals("zr", model.spec().name()); - Assertions.assertEquals("dgrtwaenuuzkopbm", model.spec().availability()); - Assertions.assertEquals("rfdwoyu", model.spec().cloud()); - Assertions.assertEquals("ziuiefozbhdm", model.spec().zone()); - Assertions.assertEquals("l", model.spec().region()); - Assertions.assertEquals("qhoftrmaequiah", model.spec().kafkaBootstrapEndpoint()); - Assertions.assertEquals("cslfaoqzpiyylha", model.spec().httpEndpoint()); - Assertions.assertEquals("swhccsphk", model.spec().apiEndpoint()); - Assertions.assertEquals("witqscywuggwoluh", model.spec().config().kind()); - Assertions.assertEquals("wem", model.spec().environment().id()); - Assertions.assertEquals("i", model.spec().environment().environment()); - Assertions.assertEquals("brgz", model.spec().environment().related()); - Assertions.assertEquals("msweypqwdxggicc", model.spec().environment().resourceName()); - Assertions.assertEquals("qhuexm", model.spec().network().id()); - Assertions.assertEquals("tlstvlzywem", model.spec().network().environment()); - Assertions.assertEquals("rncsdtclu", model.spec().network().related()); - 
Assertions.assertEquals("ypbsfgytguslfead", model.spec().network().resourceName()); - Assertions.assertEquals("qukyhejhzi", model.spec().byok().id()); - Assertions.assertEquals("gfpelolppvksrpqv", model.spec().byok().related()); - Assertions.assertEquals("zraehtwd", model.spec().byok().resourceName()); - Assertions.assertEquals("tswiby", model.status().phase()); - Assertions.assertEquals(403427600, model.status().cku()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterSpecEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterSpecEntityTests.java deleted file mode 100644 index 1d801fbc766c..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCClusterSpecEntityTests.java +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.ClusterConfigEntity; -import com.azure.resourcemanager.confluent.models.SCClusterByokEntity; -import com.azure.resourcemanager.confluent.models.SCClusterNetworkEnvironmentEntity; -import com.azure.resourcemanager.confluent.models.SCClusterSpecEntity; -import org.junit.jupiter.api.Assertions; - -public final class SCClusterSpecEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SCClusterSpecEntity model = BinaryData.fromString( - "{\"name\":\"ytisibir\",\"availability\":\"pikpz\",\"cloud\":\"ejzanlfz\",\"zone\":\"av\",\"region\":\"bzonok\",\"kafkaBootstrapEndpoint\":\"rjqc\",\"httpEndpoint\":\"gzpfrla\",\"apiEndpoint\":\"zrnw\",\"config\":{\"kind\":\"ndfpwpj\"},\"environment\":{\"id\":\"bt\",\"environment\":\"flsjc\",\"related\":\"szfjvfbgofelja\",\"resourceName\":\"qmqhldvriii\"},\"network\":{\"id\":\"al\",\"environment\":\"fk\",\"related\":\"vsexsowuelu\",\"resourceName\":\"hahhxvrhmzkwpj\"},\"byok\":{\"id\":\"spughftqsxhq\",\"related\":\"j\",\"resourceName\":\"kndxdigrjgu\"}}") - .toObject(SCClusterSpecEntity.class); - Assertions.assertEquals("ytisibir", model.name()); - Assertions.assertEquals("pikpz", model.availability()); - Assertions.assertEquals("ejzanlfz", model.cloud()); - Assertions.assertEquals("av", model.zone()); - Assertions.assertEquals("bzonok", model.region()); - Assertions.assertEquals("rjqc", model.kafkaBootstrapEndpoint()); - Assertions.assertEquals("gzpfrla", model.httpEndpoint()); - Assertions.assertEquals("zrnw", model.apiEndpoint()); - Assertions.assertEquals("ndfpwpj", model.config().kind()); - Assertions.assertEquals("bt", model.environment().id()); - Assertions.assertEquals("flsjc", model.environment().environment()); - Assertions.assertEquals("szfjvfbgofelja", model.environment().related()); - Assertions.assertEquals("qmqhldvriii", model.environment().resourceName()); - Assertions.assertEquals("al", model.network().id()); - Assertions.assertEquals("fk", model.network().environment()); - Assertions.assertEquals("vsexsowuelu", model.network().related()); - Assertions.assertEquals("hahhxvrhmzkwpj", model.network().resourceName()); - Assertions.assertEquals("spughftqsxhq", model.byok().id()); - Assertions.assertEquals("j", model.byok().related()); - 
Assertions.assertEquals("kndxdigrjgu", model.byok().resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SCClusterSpecEntity model = new SCClusterSpecEntity().withName("ytisibir") - .withAvailability("pikpz") - .withCloud("ejzanlfz") - .withZone("av") - .withRegion("bzonok") - .withKafkaBootstrapEndpoint("rjqc") - .withHttpEndpoint("gzpfrla") - .withApiEndpoint("zrnw") - .withConfig(new ClusterConfigEntity().withKind("ndfpwpj")) - .withEnvironment(new SCClusterNetworkEnvironmentEntity().withId("bt") - .withEnvironment("flsjc") - .withRelated("szfjvfbgofelja") - .withResourceName("qmqhldvriii")) - .withNetwork(new SCClusterNetworkEnvironmentEntity().withId("al") - .withEnvironment("fk") - .withRelated("vsexsowuelu") - .withResourceName("hahhxvrhmzkwpj")) - .withByok( - new SCClusterByokEntity().withId("spughftqsxhq").withRelated("j").withResourceName("kndxdigrjgu")); - model = BinaryData.fromObject(model).toObject(SCClusterSpecEntity.class); - Assertions.assertEquals("ytisibir", model.name()); - Assertions.assertEquals("pikpz", model.availability()); - Assertions.assertEquals("ejzanlfz", model.cloud()); - Assertions.assertEquals("av", model.zone()); - Assertions.assertEquals("bzonok", model.region()); - Assertions.assertEquals("rjqc", model.kafkaBootstrapEndpoint()); - Assertions.assertEquals("gzpfrla", model.httpEndpoint()); - Assertions.assertEquals("zrnw", model.apiEndpoint()); - Assertions.assertEquals("ndfpwpj", model.config().kind()); - Assertions.assertEquals("bt", model.environment().id()); - Assertions.assertEquals("flsjc", model.environment().environment()); - Assertions.assertEquals("szfjvfbgofelja", model.environment().related()); - Assertions.assertEquals("qmqhldvriii", model.environment().resourceName()); - Assertions.assertEquals("al", model.network().id()); - Assertions.assertEquals("fk", model.network().environment()); - Assertions.assertEquals("vsexsowuelu", model.network().related()); - Assertions.assertEquals("hahhxvrhmzkwpj", model.network().resourceName()); - Assertions.assertEquals("spughftqsxhq", model.byok().id()); - Assertions.assertEquals("j", model.byok().related()); - Assertions.assertEquals("kndxdigrjgu", model.byok().resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCEnvironmentRecordInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCEnvironmentRecordInnerTests.java deleted file mode 100644 index 31f27bd25d83..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCEnvironmentRecordInnerTests.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.fluent.models.SCEnvironmentRecordInner; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class SCEnvironmentRecordInnerTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SCEnvironmentRecordInner model = BinaryData.fromString( - "{\"kind\":\"lyzrpzbchckqqzqi\",\"id\":\"iysui\",\"properties\":{\"metadata\":{\"self\":\"ked\",\"resourceName\":\"trwyhqmib\",\"createdTimestamp\":\"hwit\",\"updatedTimestamp\":\"ypyynpcdpumnzg\",\"deletedTimestamp\":\"z\"}},\"name\":\"abikns\"}") - .toObject(SCEnvironmentRecordInner.class); - Assertions.assertEquals("lyzrpzbchckqqzqi", model.kind()); - Assertions.assertEquals("iysui", model.id()); - Assertions.assertEquals("abikns", model.name()); - Assertions.assertEquals("ked", model.metadata().self()); - Assertions.assertEquals("trwyhqmib", model.metadata().resourceName()); - Assertions.assertEquals("hwit", model.metadata().createdTimestamp()); - Assertions.assertEquals("ypyynpcdpumnzg", model.metadata().updatedTimestamp()); - Assertions.assertEquals("z", model.metadata().deletedTimestamp()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SCEnvironmentRecordInner model = new SCEnvironmentRecordInner().withKind("lyzrpzbchckqqzqi") - .withId("iysui") - .withName("abikns") - .withMetadata(new SCMetadataEntity().withSelf("ked") - .withResourceName("trwyhqmib") - .withCreatedTimestamp("hwit") - .withUpdatedTimestamp("ypyynpcdpumnzg") - .withDeletedTimestamp("z")); - model = BinaryData.fromObject(model).toObject(SCEnvironmentRecordInner.class); - Assertions.assertEquals("lyzrpzbchckqqzqi", model.kind()); - Assertions.assertEquals("iysui", model.id()); - Assertions.assertEquals("abikns", model.name()); - Assertions.assertEquals("ked", model.metadata().self()); - Assertions.assertEquals("trwyhqmib", model.metadata().resourceName()); - Assertions.assertEquals("hwit", model.metadata().createdTimestamp()); - Assertions.assertEquals("ypyynpcdpumnzg", model.metadata().updatedTimestamp()); - Assertions.assertEquals("z", model.metadata().deletedTimestamp()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCMetadataEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCMetadataEntityTests.java deleted file mode 100644 index 655af7401fca..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SCMetadataEntityTests.java +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.SCMetadataEntity; -import org.junit.jupiter.api.Assertions; - -public final class SCMetadataEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SCMetadataEntity model = BinaryData.fromString( - "{\"self\":\"gxqquezik\",\"resourceName\":\"ggxkallatmelwuip\",\"createdTimestamp\":\"cjzkzivgvvcna\",\"updatedTimestamp\":\"hyrnxxmu\",\"deletedTimestamp\":\"dndrdvstkwqqtche\"}") - .toObject(SCMetadataEntity.class); - Assertions.assertEquals("gxqquezik", model.self()); - Assertions.assertEquals("ggxkallatmelwuip", model.resourceName()); - Assertions.assertEquals("cjzkzivgvvcna", model.createdTimestamp()); - Assertions.assertEquals("hyrnxxmu", model.updatedTimestamp()); - Assertions.assertEquals("dndrdvstkwqqtche", model.deletedTimestamp()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SCMetadataEntity model = new SCMetadataEntity().withSelf("gxqquezik") - .withResourceName("ggxkallatmelwuip") - .withCreatedTimestamp("cjzkzivgvvcna") - .withUpdatedTimestamp("hyrnxxmu") - .withDeletedTimestamp("dndrdvstkwqqtche"); - model = BinaryData.fromObject(model).toObject(SCMetadataEntity.class); - Assertions.assertEquals("gxqquezik", model.self()); - Assertions.assertEquals("ggxkallatmelwuip", model.resourceName()); - Assertions.assertEquals("cjzkzivgvvcna", model.createdTimestamp()); - Assertions.assertEquals("hyrnxxmu", model.updatedTimestamp()); - Assertions.assertEquals("dndrdvstkwqqtche", model.deletedTimestamp()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterEnvironmentRegionEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterEnvironmentRegionEntityTests.java deleted file mode 100644 index 93e21bf1f1d4..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterEnvironmentRegionEntityTests.java +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
- -package com.azure.resourcemanager.confluent.generated; - -import com.azure.core.util.BinaryData; -import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterEnvironmentRegionEntity; -import org.junit.jupiter.api.Assertions; - -public final class SchemaRegistryClusterEnvironmentRegionEntityTests { - @org.junit.jupiter.api.Test - public void testDeserialize() throws Exception { - SchemaRegistryClusterEnvironmentRegionEntity model - = BinaryData.fromString("{\"id\":\"xum\",\"related\":\"ton\",\"resourceName\":\"jl\"}") - .toObject(SchemaRegistryClusterEnvironmentRegionEntity.class); - Assertions.assertEquals("xum", model.id()); - Assertions.assertEquals("ton", model.related()); - Assertions.assertEquals("jl", model.resourceName()); - } - - @org.junit.jupiter.api.Test - public void testSerialize() throws Exception { - SchemaRegistryClusterEnvironmentRegionEntity model - = new SchemaRegistryClusterEnvironmentRegionEntity().withId("xum") - .withRelated("ton") - .withResourceName("jl"); - model = BinaryData.fromObject(model).toObject(SchemaRegistryClusterEnvironmentRegionEntity.class); - Assertions.assertEquals("xum", model.id()); - Assertions.assertEquals("ton", model.related()); - Assertions.assertEquals("jl", model.resourceName()); - } -} diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterPropertiesTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterPropertiesTests.java deleted file mode 100644 index f1da24a85d4d..000000000000 --- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterPropertiesTests.java +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. 
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.fluent.models.SchemaRegistryClusterProperties;
-import com.azure.resourcemanager.confluent.models.SCMetadataEntity;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterEnvironmentRegionEntity;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterSpecEntity;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterStatusEntity;
-import org.junit.jupiter.api.Assertions;
-
-public final class SchemaRegistryClusterPropertiesTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        SchemaRegistryClusterProperties model = BinaryData.fromString(
-            "{\"metadata\":{\"self\":\"olbq\",\"resourceName\":\"uzlm\",\"createdTimestamp\":\"elfk\",\"updatedTimestamp\":\"plcrpwjxeznoig\",\"deletedTimestamp\":\"njwmwkpnbsazejj\"},\"spec\":{\"name\":\"agfhsxtta\",\"httpEndpoint\":\"zxnfaaz\",\"package\":\"dtnkdmkq\",\"region\":{\"id\":\"wuenvr\",\"related\":\"yo\",\"resourceName\":\"ibreb\"},\"environment\":{\"id\":\"ysjkixqtnqttez\",\"related\":\"fffiak\",\"resourceName\":\"pqqmted\"},\"cloud\":\"mmji\"},\"status\":{\"phase\":\"ozphvwauyqncygu\"}}")
-            .toObject(SchemaRegistryClusterProperties.class);
-        Assertions.assertEquals("olbq", model.metadata().self());
-        Assertions.assertEquals("uzlm", model.metadata().resourceName());
-        Assertions.assertEquals("elfk", model.metadata().createdTimestamp());
-        Assertions.assertEquals("plcrpwjxeznoig", model.metadata().updatedTimestamp());
-        Assertions.assertEquals("njwmwkpnbsazejj", model.metadata().deletedTimestamp());
-        Assertions.assertEquals("agfhsxtta", model.spec().name());
-        Assertions.assertEquals("zxnfaaz", model.spec().httpEndpoint());
-        Assertions.assertEquals("dtnkdmkq", model.spec().packageProperty());
-        Assertions.assertEquals("wuenvr", model.spec().region().id());
-        Assertions.assertEquals("yo", model.spec().region().related());
-        Assertions.assertEquals("ibreb", model.spec().region().resourceName());
-        Assertions.assertEquals("ysjkixqtnqttez", model.spec().environment().id());
-        Assertions.assertEquals("fffiak", model.spec().environment().related());
-        Assertions.assertEquals("pqqmted", model.spec().environment().resourceName());
-        Assertions.assertEquals("mmji", model.spec().cloud());
-        Assertions.assertEquals("ozphvwauyqncygu", model.status().phase());
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        SchemaRegistryClusterProperties model = new SchemaRegistryClusterProperties()
-            .withMetadata(new SCMetadataEntity().withSelf("olbq")
-                .withResourceName("uzlm")
-                .withCreatedTimestamp("elfk")
-                .withUpdatedTimestamp("plcrpwjxeznoig")
-                .withDeletedTimestamp("njwmwkpnbsazejj"))
-            .withSpec(new SchemaRegistryClusterSpecEntity().withName("agfhsxtta")
-                .withHttpEndpoint("zxnfaaz")
-                .withPackageProperty("dtnkdmkq")
-                .withRegion(new SchemaRegistryClusterEnvironmentRegionEntity().withId("wuenvr")
-                    .withRelated("yo")
-                    .withResourceName("ibreb"))
-                .withEnvironment(new SchemaRegistryClusterEnvironmentRegionEntity().withId("ysjkixqtnqttez")
-                    .withRelated("fffiak")
-                    .withResourceName("pqqmted"))
-                .withCloud("mmji"))
-            .withStatus(new SchemaRegistryClusterStatusEntity().withPhase("ozphvwauyqncygu"));
-        model = BinaryData.fromObject(model).toObject(SchemaRegistryClusterProperties.class);
-        Assertions.assertEquals("olbq", model.metadata().self());
-        Assertions.assertEquals("uzlm", model.metadata().resourceName());
-        Assertions.assertEquals("elfk", model.metadata().createdTimestamp());
-        Assertions.assertEquals("plcrpwjxeznoig", model.metadata().updatedTimestamp());
-        Assertions.assertEquals("njwmwkpnbsazejj", model.metadata().deletedTimestamp());
-        Assertions.assertEquals("agfhsxtta", model.spec().name());
-        Assertions.assertEquals("zxnfaaz", model.spec().httpEndpoint());
-        Assertions.assertEquals("dtnkdmkq", model.spec().packageProperty());
-        Assertions.assertEquals("wuenvr", model.spec().region().id());
-        Assertions.assertEquals("yo", model.spec().region().related());
-        Assertions.assertEquals("ibreb", model.spec().region().resourceName());
-        Assertions.assertEquals("ysjkixqtnqttez", model.spec().environment().id());
-        Assertions.assertEquals("fffiak", model.spec().environment().related());
-        Assertions.assertEquals("pqqmted", model.spec().environment().resourceName());
-        Assertions.assertEquals("mmji", model.spec().cloud());
-        Assertions.assertEquals("ozphvwauyqncygu", model.status().phase());
-    }
-}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterRecordInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterRecordInnerTests.java
deleted file mode 100644
index 4c55c7094f64..000000000000
--- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterRecordInnerTests.java
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-// Code generated by Microsoft (R) AutoRest Code Generator.
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.fluent.models.SchemaRegistryClusterRecordInner;
-import com.azure.resourcemanager.confluent.models.SCMetadataEntity;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterEnvironmentRegionEntity;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterSpecEntity;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterStatusEntity;
-import org.junit.jupiter.api.Assertions;
-
-public final class SchemaRegistryClusterRecordInnerTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        SchemaRegistryClusterRecordInner model = BinaryData.fromString(
-            "{\"kind\":\"kvtkkg\",\"id\":\"qwjygvja\",\"properties\":{\"metadata\":{\"self\":\"mhvkzuhbxvvyh\",\"resourceName\":\"opbyrqufegxu\",\"createdTimestamp\":\"zfbn\",\"updatedTimestamp\":\"mctlpdngitv\",\"deletedTimestamp\":\"mhrixkwmyijejve\"},\"spec\":{\"name\":\"bpnaixexccbdre\",\"httpEndpoint\":\"hcexdrrvqa\",\"package\":\"kghtpwijnh\",\"region\":{\"id\":\"vfycxzb\",\"related\":\"oowvrv\",\"resourceName\":\"gjqppy\"},\"environment\":{\"id\":\"ronzmyhgfip\",\"related\":\"xkmcwaekrrjre\",\"resourceName\":\"xt\"},\"cloud\":\"umh\"},\"status\":{\"phase\":\"ikkx\"}}}")
-            .toObject(SchemaRegistryClusterRecordInner.class);
-        Assertions.assertEquals("kvtkkg", model.kind());
-        Assertions.assertEquals("qwjygvja", model.id());
-        Assertions.assertEquals("mhvkzuhbxvvyh", model.metadata().self());
-        Assertions.assertEquals("opbyrqufegxu", model.metadata().resourceName());
-        Assertions.assertEquals("zfbn", model.metadata().createdTimestamp());
-        Assertions.assertEquals("mctlpdngitv", model.metadata().updatedTimestamp());
-        Assertions.assertEquals("mhrixkwmyijejve", model.metadata().deletedTimestamp());
-        Assertions.assertEquals("bpnaixexccbdre", model.spec().name());
-        Assertions.assertEquals("hcexdrrvqa", model.spec().httpEndpoint());
-        Assertions.assertEquals("kghtpwijnh", model.spec().packageProperty());
-        Assertions.assertEquals("vfycxzb", model.spec().region().id());
-        Assertions.assertEquals("oowvrv", model.spec().region().related());
-        Assertions.assertEquals("gjqppy", model.spec().region().resourceName());
-        Assertions.assertEquals("ronzmyhgfip", model.spec().environment().id());
-        Assertions.assertEquals("xkmcwaekrrjre", model.spec().environment().related());
-        Assertions.assertEquals("xt", model.spec().environment().resourceName());
-        Assertions.assertEquals("umh", model.spec().cloud());
-        Assertions.assertEquals("ikkx", model.status().phase());
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        SchemaRegistryClusterRecordInner model = new SchemaRegistryClusterRecordInner().withKind("kvtkkg")
-            .withId("qwjygvja")
-            .withMetadata(new SCMetadataEntity().withSelf("mhvkzuhbxvvyh")
-                .withResourceName("opbyrqufegxu")
-                .withCreatedTimestamp("zfbn")
-                .withUpdatedTimestamp("mctlpdngitv")
-                .withDeletedTimestamp("mhrixkwmyijejve"))
-            .withSpec(new SchemaRegistryClusterSpecEntity().withName("bpnaixexccbdre")
-                .withHttpEndpoint("hcexdrrvqa")
-                .withPackageProperty("kghtpwijnh")
-                .withRegion(new SchemaRegistryClusterEnvironmentRegionEntity().withId("vfycxzb")
-                    .withRelated("oowvrv")
-                    .withResourceName("gjqppy"))
-                .withEnvironment(new SchemaRegistryClusterEnvironmentRegionEntity().withId("ronzmyhgfip")
-                    .withRelated("xkmcwaekrrjre")
-                    .withResourceName("xt"))
-                .withCloud("umh"))
-            .withStatus(new SchemaRegistryClusterStatusEntity().withPhase("ikkx"));
-        model = BinaryData.fromObject(model).toObject(SchemaRegistryClusterRecordInner.class);
-        Assertions.assertEquals("kvtkkg", model.kind());
-        Assertions.assertEquals("qwjygvja", model.id());
-        Assertions.assertEquals("mhvkzuhbxvvyh", model.metadata().self());
-        Assertions.assertEquals("opbyrqufegxu", model.metadata().resourceName());
-        Assertions.assertEquals("zfbn", model.metadata().createdTimestamp());
-        Assertions.assertEquals("mctlpdngitv", model.metadata().updatedTimestamp());
-        Assertions.assertEquals("mhrixkwmyijejve", model.metadata().deletedTimestamp());
-        Assertions.assertEquals("bpnaixexccbdre", model.spec().name());
-        Assertions.assertEquals("hcexdrrvqa", model.spec().httpEndpoint());
-        Assertions.assertEquals("kghtpwijnh", model.spec().packageProperty());
-        Assertions.assertEquals("vfycxzb", model.spec().region().id());
-        Assertions.assertEquals("oowvrv", model.spec().region().related());
-        Assertions.assertEquals("gjqppy", model.spec().region().resourceName());
-        Assertions.assertEquals("ronzmyhgfip", model.spec().environment().id());
-        Assertions.assertEquals("xkmcwaekrrjre", model.spec().environment().related());
-        Assertions.assertEquals("xt", model.spec().environment().resourceName());
-        Assertions.assertEquals("umh", model.spec().cloud());
-        Assertions.assertEquals("ikkx", model.status().phase());
-    }
-}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterSpecEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterSpecEntityTests.java
deleted file mode 100644
index 15c7036d0cc8..000000000000
--- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterSpecEntityTests.java
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-// Code generated by Microsoft (R) AutoRest Code Generator.
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterEnvironmentRegionEntity;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterSpecEntity;
-import org.junit.jupiter.api.Assertions;
-
-public final class SchemaRegistryClusterSpecEntityTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        SchemaRegistryClusterSpecEntity model = BinaryData.fromString(
-            "{\"name\":\"vipmdscwxqupevzh\",\"httpEndpoint\":\"totxhojujb\",\"package\":\"elmcuvhixbjxyfw\",\"region\":{\"id\":\"r\",\"related\":\"o\",\"resourceName\":\"ttpkiwkkbnujrywv\"},\"environment\":{\"id\":\"bfpncurdo\",\"related\":\"iithtywu\",\"resourceName\":\"cbihwqk\"},\"cloud\":\"dntwjchrdgo\"}")
-            .toObject(SchemaRegistryClusterSpecEntity.class);
-        Assertions.assertEquals("vipmdscwxqupevzh", model.name());
-        Assertions.assertEquals("totxhojujb", model.httpEndpoint());
-        Assertions.assertEquals("elmcuvhixbjxyfw", model.packageProperty());
-        Assertions.assertEquals("r", model.region().id());
-        Assertions.assertEquals("o", model.region().related());
-        Assertions.assertEquals("ttpkiwkkbnujrywv", model.region().resourceName());
-        Assertions.assertEquals("bfpncurdo", model.environment().id());
-        Assertions.assertEquals("iithtywu", model.environment().related());
-        Assertions.assertEquals("cbihwqk", model.environment().resourceName());
-        Assertions.assertEquals("dntwjchrdgo", model.cloud());
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        SchemaRegistryClusterSpecEntity model = new SchemaRegistryClusterSpecEntity().withName("vipmdscwxqupevzh")
-            .withHttpEndpoint("totxhojujb")
-            .withPackageProperty("elmcuvhixbjxyfw")
-            .withRegion(new SchemaRegistryClusterEnvironmentRegionEntity().withId("r")
-                .withRelated("o")
-                .withResourceName("ttpkiwkkbnujrywv"))
-            .withEnvironment(new SchemaRegistryClusterEnvironmentRegionEntity().withId("bfpncurdo")
-                .withRelated("iithtywu")
-                .withResourceName("cbihwqk"))
-            .withCloud("dntwjchrdgo");
-        model = BinaryData.fromObject(model).toObject(SchemaRegistryClusterSpecEntity.class);
-        Assertions.assertEquals("vipmdscwxqupevzh", model.name());
-        Assertions.assertEquals("totxhojujb", model.httpEndpoint());
-        Assertions.assertEquals("elmcuvhixbjxyfw", model.packageProperty());
-        Assertions.assertEquals("r", model.region().id());
-        Assertions.assertEquals("o", model.region().related());
-        Assertions.assertEquals("ttpkiwkkbnujrywv", model.region().resourceName());
-        Assertions.assertEquals("bfpncurdo", model.environment().id());
-        Assertions.assertEquals("iithtywu", model.environment().related());
-        Assertions.assertEquals("cbihwqk", model.environment().resourceName());
-        Assertions.assertEquals("dntwjchrdgo", model.cloud());
-    }
-}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterStatusEntityTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterStatusEntityTests.java
deleted file mode 100644
index fc0ac24a9c06..000000000000
--- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/SchemaRegistryClusterStatusEntityTests.java
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-// Code generated by Microsoft (R) AutoRest Code Generator.
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.models.SchemaRegistryClusterStatusEntity;
-import org.junit.jupiter.api.Assertions;
-
-public final class SchemaRegistryClusterStatusEntityTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        SchemaRegistryClusterStatusEntity model
-            = BinaryData.fromString("{\"phase\":\"dfdlwggyts\"}").toObject(SchemaRegistryClusterStatusEntity.class);
-        Assertions.assertEquals("dfdlwggyts", model.phase());
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        SchemaRegistryClusterStatusEntity model = new SchemaRegistryClusterStatusEntity().withPhase("dfdlwggyts");
-        model = BinaryData.fromObject(model).toObject(SchemaRegistryClusterStatusEntity.class);
-        Assertions.assertEquals("dfdlwggyts", model.phase());
-    }
-}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ServiceAccountRecordTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ServiceAccountRecordTests.java
deleted file mode 100644
index 6ff89152885f..000000000000
--- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ServiceAccountRecordTests.java
+++ /dev/null
@@ -1,51 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-// Code generated by Microsoft (R) AutoRest Code Generator.
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.models.MetadataEntity;
-import com.azure.resourcemanager.confluent.models.ServiceAccountRecord;
-import org.junit.jupiter.api.Assertions;
-
-public final class ServiceAccountRecordTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        ServiceAccountRecord model = BinaryData.fromString(
-            "{\"kind\":\"i\",\"id\":\"xhqyudxorrqnb\",\"metadata\":{\"self\":\"zvyifqrvkdvj\",\"resource_name\":\"lrmv\",\"created_at\":\"f\",\"updated_at\":\"t\",\"deleted_at\":\"n\"},\"display_name\":\"lexxbczwtru\",\"description\":\"qzbqjvsov\"}")
-            .toObject(ServiceAccountRecord.class);
-        Assertions.assertEquals("i", model.kind());
-        Assertions.assertEquals("xhqyudxorrqnb", model.id());
-        Assertions.assertEquals("zvyifqrvkdvj", model.metadata().self());
-        Assertions.assertEquals("lrmv", model.metadata().resourceName());
-        Assertions.assertEquals("f", model.metadata().createdAt());
-        Assertions.assertEquals("t", model.metadata().updatedAt());
-        Assertions.assertEquals("n", model.metadata().deletedAt());
-        Assertions.assertEquals("lexxbczwtru", model.displayName());
-        Assertions.assertEquals("qzbqjvsov", model.description());
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        ServiceAccountRecord model = new ServiceAccountRecord().withKind("i")
-            .withId("xhqyudxorrqnb")
-            .withMetadata(new MetadataEntity().withSelf("zvyifqrvkdvj")
-                .withResourceName("lrmv")
-                .withCreatedAt("f")
-                .withUpdatedAt("t")
-                .withDeletedAt("n"))
-            .withDisplayName("lexxbczwtru")
-            .withDescription("qzbqjvsov");
-        model = BinaryData.fromObject(model).toObject(ServiceAccountRecord.class);
-        Assertions.assertEquals("i", model.kind());
-        Assertions.assertEquals("xhqyudxorrqnb", model.id());
-        Assertions.assertEquals("zvyifqrvkdvj", model.metadata().self());
-        Assertions.assertEquals("lrmv", model.metadata().resourceName());
-        Assertions.assertEquals("f", model.metadata().createdAt());
-        Assertions.assertEquals("t", model.metadata().updatedAt());
-        Assertions.assertEquals("n", model.metadata().deletedAt());
-        Assertions.assertEquals("lexxbczwtru", model.displayName());
-        Assertions.assertEquals("qzbqjvsov", model.description());
-    }
-}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/UserDetailTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/UserDetailTests.java
deleted file mode 100644
index 57277ce483ad..000000000000
--- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/UserDetailTests.java
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-// Code generated by Microsoft (R) AutoRest Code Generator.
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.models.UserDetail;
-import org.junit.jupiter.api.Assertions;
-
-public final class UserDetailTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        UserDetail model = BinaryData.fromString(
-            "{\"firstName\":\"qfprwzwbn\",\"lastName\":\"itnwuizgazxufi\",\"emailAddress\":\"uckyf\",\"userPrincipalName\":\"rfidfvzwdz\",\"aadEmail\":\"tymw\"}")
-            .toObject(UserDetail.class);
-        Assertions.assertEquals("qfprwzwbn", model.firstName());
-        Assertions.assertEquals("itnwuizgazxufi", model.lastName());
-        Assertions.assertEquals("uckyf", model.emailAddress());
-        Assertions.assertEquals("rfidfvzwdz", model.userPrincipalName());
-        Assertions.assertEquals("tymw", model.aadEmail());
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        UserDetail model = new UserDetail().withFirstName("qfprwzwbn")
-            .withLastName("itnwuizgazxufi")
-            .withEmailAddress("uckyf")
-            .withUserPrincipalName("rfidfvzwdz")
-            .withAadEmail("tymw");
-        model = BinaryData.fromObject(model).toObject(UserDetail.class);
-        Assertions.assertEquals("qfprwzwbn", model.firstName());
-        Assertions.assertEquals("itnwuizgazxufi", model.lastName());
-        Assertions.assertEquals("uckyf", model.emailAddress());
-        Assertions.assertEquals("rfidfvzwdz", model.userPrincipalName());
-        Assertions.assertEquals("tymw", model.aadEmail());
-    }
-}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/UserRecordTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/UserRecordTests.java
deleted file mode 100644
index 1838c49caab7..000000000000
--- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/UserRecordTests.java
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-// Code generated by Microsoft (R) AutoRest Code Generator.
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.models.MetadataEntity;
-import com.azure.resourcemanager.confluent.models.UserRecord;
-import org.junit.jupiter.api.Assertions;
-
-public final class UserRecordTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        UserRecord model = BinaryData.fromString(
-            "{\"kind\":\"rvqdra\",\"id\":\"jybige\",\"metadata\":{\"self\":\"fbowskanyk\",\"resource_name\":\"lcuiywgqywgndr\",\"created_at\":\"nhzgpphrcgyn\",\"updated_at\":\"cpecfvmmcoofs\",\"deleted_at\":\"zevgb\"},\"email\":\"jqabcypmivkwlzuv\",\"full_name\":\"fwnfnb\",\"auth_type\":\"fionl\"}")
-            .toObject(UserRecord.class);
-        Assertions.assertEquals("rvqdra", model.kind());
-        Assertions.assertEquals("jybige", model.id());
-        Assertions.assertEquals("fbowskanyk", model.metadata().self());
-        Assertions.assertEquals("lcuiywgqywgndr", model.metadata().resourceName());
-        Assertions.assertEquals("nhzgpphrcgyn", model.metadata().createdAt());
-        Assertions.assertEquals("cpecfvmmcoofs", model.metadata().updatedAt());
-        Assertions.assertEquals("zevgb", model.metadata().deletedAt());
-        Assertions.assertEquals("jqabcypmivkwlzuv", model.email());
-        Assertions.assertEquals("fwnfnb", model.fullName());
-        Assertions.assertEquals("fionl", model.authType());
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        UserRecord model = new UserRecord().withKind("rvqdra")
-            .withId("jybige")
-            .withMetadata(new MetadataEntity().withSelf("fbowskanyk")
-                .withResourceName("lcuiywgqywgndr")
-                .withCreatedAt("nhzgpphrcgyn")
-                .withUpdatedAt("cpecfvmmcoofs")
-                .withDeletedAt("zevgb"))
-            .withEmail("jqabcypmivkwlzuv")
-            .withFullName("fwnfnb")
-            .withAuthType("fionl");
-        model = BinaryData.fromObject(model).toObject(UserRecord.class);
-        Assertions.assertEquals("rvqdra", model.kind());
-        Assertions.assertEquals("jybige", model.id());
-        Assertions.assertEquals("fbowskanyk", model.metadata().self());
-        Assertions.assertEquals("lcuiywgqywgndr", model.metadata().resourceName());
-        Assertions.assertEquals("nhzgpphrcgyn", model.metadata().createdAt());
-        Assertions.assertEquals("cpecfvmmcoofs", model.metadata().updatedAt());
-        Assertions.assertEquals("zevgb", model.metadata().deletedAt());
-        Assertions.assertEquals("jqabcypmivkwlzuv", model.email());
-        Assertions.assertEquals("fwnfnb", model.fullName());
-        Assertions.assertEquals("fionl", model.authType());
-    }
-}
diff --git a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ValidationResponseInnerTests.java b/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ValidationResponseInnerTests.java
deleted file mode 100644
index 90e6cbe1ea06..000000000000
--- a/sdk/confluent/azure-resourcemanager-confluent/src/test/java/com/azure/resourcemanager/confluent/generated/ValidationResponseInnerTests.java
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-// Code generated by Microsoft (R) AutoRest Code Generator.
-
-package com.azure.resourcemanager.confluent.generated;
-
-import com.azure.core.util.BinaryData;
-import com.azure.resourcemanager.confluent.fluent.models.ValidationResponseInner;
-import java.util.HashMap;
-import java.util.Map;
-import org.junit.jupiter.api.Assertions;
-
-public final class ValidationResponseInnerTests {
-    @org.junit.jupiter.api.Test
-    public void testDeserialize() throws Exception {
-        ValidationResponseInner model
-            = BinaryData.fromString("{\"info\":{\"eeh\":\"sflnrosfqp\",\"swjdkirso\":\"zvypyqrimzinp\"}}")
-                .toObject(ValidationResponseInner.class);
-        Assertions.assertEquals("sflnrosfqp", model.info().get("eeh"));
-    }
-
-    @org.junit.jupiter.api.Test
-    public void testSerialize() throws Exception {
-        ValidationResponseInner model
-            = new ValidationResponseInner().withInfo(mapOf("eeh", "sflnrosfqp", "swjdkirso", "zvypyqrimzinp"));
-        model = BinaryData.fromObject(model).toObject(ValidationResponseInner.class);
-        Assertions.assertEquals("sflnrosfqp", model.info().get("eeh"));
-    }
-
-    // Use "Map.of" if available
-    @SuppressWarnings("unchecked")
-    private static <T> Map<String, T> mapOf(Object... inputs) {
-        Map<String, T> map = new HashMap<>();
-        for (int i = 0; i < inputs.length; i += 2) {
-            String key = (String) inputs[i];
-            T value = (T) inputs[i + 1];
-            map.put(key, value);
-        }
-        return map;
-    }
-}