diff --git a/sdk/pineconevectordb/ci.yml b/sdk/pineconevectordb/ci.yml
deleted file mode 100644
index 0b8325c8c903..000000000000
--- a/sdk/pineconevectordb/ci.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file.
-
-trigger:
-  branches:
-    include:
-    - main
-    - hotfix/*
-    - release/*
-  paths:
-    include:
-    - sdk/pineconevectordb
-    - sdk/pineconevectordb/ci.yml
-    - sdk/pineconevectordb/Azure.ResourceManager.PineconeVectorDb
-
-pr:
-  branches:
-    include:
-    - main
-    - feature/*
-    - hotfix/*
-    - release/*
-  paths:
-    include:
-    - sdk/pineconevectordb
-    - sdk/pineconevectordb/ci.yml
-    - sdk/pineconevectordb/Azure.ResourceManager.PineconeVectorDb
-
-extends:
-  template: /eng/pipelines/templates/stages/archetype-sdk-client.yml
-  parameters:
-    ServiceDirectory: pineconevectordb
-    ArtifactName: packages
-    Artifacts:
-    - name: Azure.ResourceManager.PineconeVectorDb
-      safeName: AzureResourceManagerPineconeVectorDb
diff --git a/sdk/storage/ci.yml b/sdk/storage/ci.yml
index 2ca0501b60bd..4e749e6b85b1 100644
--- a/sdk/storage/ci.yml
+++ b/sdk/storage/ci.yml
@@ -12,6 +12,8 @@ trigger:
     - sdk/storage/Azure.Storage.DataMovement/
     - sdk/storage/Azure.Storage.DataMovement.Blobs/
     - sdk/storage/Azure.Storage.DataMovement.Files/
+    exclude:
+    - sdk/storage/Azure.ResourceManager.Storage/
    - sdk/storage/Azure.Storage.DataMovement.Blobs.Files.Shares/
    exclude:
    - sdk/storage/Azure.ResourceManager.Storage/
diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.net8.0.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.net8.0.cs
index 63ba583f0b9d..461ddf8ddaac 100644
--- a/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.net8.0.cs
+++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.net8.0.cs
@@ -57,6 +57,12 @@ protected AmlFileSystemResource() { }
         public virtual System.Threading.Tasks.Task<Azure.ResourceManager.ArmOperation> DeleteAsync(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual Azure.Response<Azure.ResourceManager.StorageCache.AmlFileSystemResource> Get(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task<Azure.Response<Azure.ResourceManager.StorageCache.AmlFileSystemResource>> GetAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response<Azure.ResourceManager.StorageCache.AutoExportJobResource> GetAutoExportJob(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.ResourceManager.StorageCache.AutoExportJobResource>> GetAutoExportJobAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.ResourceManager.StorageCache.AutoExportJobCollection GetAutoExportJobs() { throw null; }
+        public virtual Azure.Response<Azure.ResourceManager.StorageCache.AutoImportJobResource> GetAutoImportJob(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.ResourceManager.StorageCache.AutoImportJobResource>> GetAutoImportJobAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.ResourceManager.StorageCache.AutoImportJobCollection GetAutoImportJobs() { throw null; }
         public virtual Azure.Response<Azure.ResourceManager.StorageCache.StorageCacheImportJobResource> GetStorageCacheImportJob(string importJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task<Azure.Response<Azure.ResourceManager.StorageCache.StorageCacheImportJobResource>> GetStorageCacheImportJobAsync(string importJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual Azure.ResourceManager.StorageCache.StorageCacheImportJobCollection GetStorageCacheImportJobs() { throw null; }
@@ -72,6 +78,152 @@ protected AmlFileSystemResource() { }
         public virtual Azure.ResourceManager.ArmOperation<Azure.ResourceManager.StorageCache.AmlFileSystemResource> Update(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AmlFileSystemPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task<Azure.ResourceManager.ArmOperation<Azure.ResourceManager.StorageCache.AmlFileSystemResource>> UpdateAsync(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AmlFileSystemPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
     }
+    public partial class AutoExportJobCollection : Azure.ResourceManager.ArmCollection, System.Collections.Generic.IAsyncEnumerable<Azure.ResourceManager.StorageCache.AutoExportJobResource>, System.Collections.Generic.IEnumerable<Azure.ResourceManager.StorageCache.AutoExportJobResource>, System.Collections.IEnumerable
+    {
+        protected AutoExportJobCollection() { }
+        public virtual Azure.ResourceManager.ArmOperation<Azure.ResourceManager.StorageCache.AutoExportJobResource> CreateOrUpdate(Azure.WaitUntil waitUntil, string autoExportJobName, Azure.ResourceManager.StorageCache.AutoExportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.ResourceManager.ArmOperation<Azure.ResourceManager.StorageCache.AutoExportJobResource>> CreateOrUpdateAsync(Azure.WaitUntil waitUntil, string autoExportJobName, Azure.ResourceManager.StorageCache.AutoExportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response<bool> Exists(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<bool>> ExistsAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response<Azure.ResourceManager.StorageCache.AutoExportJobResource> Get(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Pageable<Azure.ResourceManager.StorageCache.AutoExportJobResource> GetAll(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.AsyncPageable<Azure.ResourceManager.StorageCache.AutoExportJobResource> GetAllAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.ResourceManager.StorageCache.AutoExportJobResource>> GetAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.NullableResponse<Azure.ResourceManager.StorageCache.AutoExportJobResource> GetIfExists(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.NullableResponse<Azure.ResourceManager.StorageCache.AutoExportJobResource>> GetIfExistsAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        System.Collections.Generic.IAsyncEnumerator<Azure.ResourceManager.StorageCache.AutoExportJobResource>
System.Collections.Generic.IAsyncEnumerable.GetAsyncEnumerator(System.Threading.CancellationToken cancellationToken) { throw null; } + System.Collections.Generic.IEnumerator System.Collections.Generic.IEnumerable.GetEnumerator() { throw null; } + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { throw null; } + } + public partial class AutoExportJobData : Azure.ResourceManager.Models.TrackedResourceData, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoExportJobData(Azure.Core.AzureLocation location) { } + public Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus? AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IList AutoExportPrefixes { get { throw null; } } + public long? CurrentIterationFilesDiscovered { get { throw null; } } + public long? CurrentIterationFilesExported { get { throw null; } } + public long? CurrentIterationFilesFailed { get { throw null; } } + public long? CurrentIterationMiBDiscovered { get { throw null; } } + public long? CurrentIterationMiBExported { get { throw null; } } + public int? ExportIterationCount { get { throw null; } } + public System.DateTimeOffset? LastCompletionTimeUTC { get { throw null; } } + public System.DateTimeOffset? LastStartedTimeUTC { get { throw null; } } + public System.DateTimeOffset? LastSuccessfulIterationCompletionTimeUTC { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType? ProvisioningState { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoExportStatusType? State { get { throw null; } set { } } + public string StatusCode { get { throw null; } } + public string StatusMessage { get { throw null; } } + public long? TotalFilesExported { get { throw null; } } + public long? TotalFilesFailed { get { throw null; } } + public long? 
TotalMiBExported { get { throw null; } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AutoExportJobResource : Azure.ResourceManager.ArmResource, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public static readonly Azure.Core.ResourceType ResourceType; + protected AutoExportJobResource() { } + public virtual Azure.ResourceManager.StorageCache.AutoExportJobData Data { get { throw null; } } + public virtual bool HasData { get { throw null; } } + public virtual Azure.Response AddTag(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> AddTagAsync(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static Azure.Core.ResourceIdentifier CreateResourceIdentifier(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Delete(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DeleteAsync(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Get(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response RemoveTag(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> RemoveTagAsync(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response SetTags(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> SetTagsAsync(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken 
cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Update(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> UpdateAsync(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } + public partial class AutoImportJobCollection : Azure.ResourceManager.ArmCollection, System.Collections.Generic.IAsyncEnumerable, System.Collections.Generic.IEnumerable, System.Collections.IEnumerable + { + protected AutoImportJobCollection() { } + public virtual Azure.ResourceManager.ArmOperation CreateOrUpdate(Azure.WaitUntil waitUntil, string autoImportJobName, Azure.ResourceManager.StorageCache.AutoImportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> CreateOrUpdateAsync(Azure.WaitUntil waitUntil, string autoImportJobName, Azure.ResourceManager.StorageCache.AutoImportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Exists(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> ExistsAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Get(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Pageable GetAll(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.AsyncPageable GetAllAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.NullableResponse GetIfExists(string autoImportJobName, 
System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetIfExistsAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + System.Collections.Generic.IAsyncEnumerator System.Collections.Generic.IAsyncEnumerable.GetAsyncEnumerator(System.Threading.CancellationToken cancellationToken) { throw null; } + System.Collections.Generic.IEnumerator System.Collections.Generic.IEnumerable.GetEnumerator() { throw null; } + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { throw null; } + } + public partial class AutoImportJobData : Azure.ResourceManager.Models.TrackedResourceData, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoImportJobData(Azure.Core.AzureLocation location) { } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus? AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IList AutoImportPrefixes { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents BlobSyncEvents { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? ConflictResolutionMode { get { throw null; } set { } } + public bool? EnableDeletions { get { throw null; } set { } } + public long? ImportedDirectories { get { throw null; } } + public long? ImportedFiles { get { throw null; } } + public long? ImportedSymlinks { get { throw null; } } + public System.DateTimeOffset? LastCompletionTimeUTC { get { throw null; } } + public System.DateTimeOffset? LastStartedTimeUTC { get { throw null; } } + public long? MaximumErrors { get { throw null; } set { } } + public long? PreexistingDirectories { get { throw null; } } + public long? PreexistingFiles { get { throw null; } } + public long? PreexistingSymlinks { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState? ProvisioningState { get { throw null; } } + public long? RateOfBlobImport { get { throw null; } } + public long? RateOfBlobWalk { get { throw null; } } + public System.DateTimeOffset? ScanEndOn { get { throw null; } } + public System.DateTimeOffset? ScanStartOn { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobState? State { get { throw null; } } + public long? TotalBlobsImported { get { throw null; } } + public long? TotalBlobsWalked { get { throw null; } } + public long? TotalConflicts { get { throw null; } } + public long? 
TotalErrors { get { throw null; } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AutoImportJobResource : Azure.ResourceManager.ArmResource, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public static readonly Azure.Core.ResourceType ResourceType; + protected AutoImportJobResource() { } + public virtual Azure.ResourceManager.StorageCache.AutoImportJobData Data { get { throw null; } } + public virtual bool HasData { get { throw null; } } + public virtual Azure.Response AddTag(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> AddTagAsync(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static Azure.Core.ResourceIdentifier CreateResourceIdentifier(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Delete(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DeleteAsync(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Get(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response RemoveTag(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> RemoveTagAsync(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response SetTags(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> SetTagsAsync(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken cancellationToken = 
default(System.Threading.CancellationToken)) { throw null; } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Update(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> UpdateAsync(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } public partial class StorageCacheCollection : Azure.ResourceManager.ArmCollection, System.Collections.Generic.IAsyncEnumerable, System.Collections.Generic.IEnumerable, System.Collections.IEnumerable { protected StorageCacheCollection() { } @@ -125,6 +277,8 @@ public static partial class StorageCacheExtensions public static Azure.ResourceManager.StorageCache.AmlFileSystemCollection GetAmlFileSystems(this Azure.ResourceManager.Resources.ResourceGroupResource resourceGroupResource) { throw null; } public static Azure.Pageable GetAmlFileSystems(this Azure.ResourceManager.Resources.SubscriptionResource subscriptionResource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static Azure.AsyncPageable GetAmlFileSystemsAsync(this Azure.ResourceManager.Resources.SubscriptionResource subscriptionResource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoExportJobResource GetAutoExportJobResource(this Azure.ResourceManager.ArmClient client, Azure.Core.ResourceIdentifier id) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoImportJobResource GetAutoImportJobResource(this Azure.ResourceManager.ArmClient client, Azure.Core.ResourceIdentifier id) { throw null; } public static Azure.Response GetRequiredAmlFSSubnetsSize(this Azure.ResourceManager.Resources.SubscriptionResource subscriptionResource, Azure.ResourceManager.StorageCache.Models.RequiredAmlFileSystemSubnetsSizeContent content = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static System.Threading.Tasks.Task> GetRequiredAmlFSSubnetsSizeAsync(this Azure.ResourceManager.Resources.SubscriptionResource subscriptionResource, Azure.ResourceManager.StorageCache.Models.RequiredAmlFileSystemSubnetsSizeContent content = null, System.Threading.CancellationToken 
cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static Azure.Response GetStorageCache(this Azure.ResourceManager.Resources.ResourceGroupResource resourceGroupResource, string cacheName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } @@ -162,13 +316,20 @@ protected StorageCacheImportJobCollection() { } public partial class StorageCacheImportJobData : Azure.ResourceManager.Models.TrackedResourceData, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public StorageCacheImportJobData(Azure.Core.AzureLocation location) { } + public Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus? AdminStatus { get { throw null; } set { } } public long? BlobsImportedPerSecond { get { throw null; } } public long? BlobsWalkedPerSecond { get { throw null; } } public Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? ConflictResolutionMode { get { throw null; } set { } } + public long? ImportedDirectories { get { throw null; } } + public long? ImportedFiles { get { throw null; } } + public long? ImportedSymlinks { get { throw null; } } public System.Collections.Generic.IList ImportPrefixes { get { throw null; } } public System.DateTimeOffset? LastCompletionOn { get { throw null; } } public System.DateTimeOffset? LastStartedOn { get { throw null; } } public int? MaximumErrors { get { throw null; } set { } } + public long? PreexistingDirectories { get { throw null; } } + public long? PreexistingFiles { get { throw null; } } + public long? PreexistingSymlinks { get { throw null; } } public Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? ProvisioningState { get { throw null; } } public Azure.ResourceManager.StorageCache.Models.ImportStatusType? State { get { throw null; } } public string StatusMessage { get { throw null; } } @@ -337,6 +498,8 @@ public partial class MockableStorageCacheArmClient : Azure.ResourceManager.ArmRe { protected MockableStorageCacheArmClient() { } public virtual Azure.ResourceManager.StorageCache.AmlFileSystemResource GetAmlFileSystemResource(Azure.Core.ResourceIdentifier id) { throw null; } + public virtual Azure.ResourceManager.StorageCache.AutoExportJobResource GetAutoExportJobResource(Azure.Core.ResourceIdentifier id) { throw null; } + public virtual Azure.ResourceManager.StorageCache.AutoImportJobResource GetAutoImportJobResource(Azure.Core.ResourceIdentifier id) { throw null; } public virtual Azure.ResourceManager.StorageCache.StorageCacheImportJobResource GetStorageCacheImportJobResource(Azure.Core.ResourceIdentifier id) { throw null; } public virtual Azure.ResourceManager.StorageCache.StorageCacheResource GetStorageCacheResource(Azure.Core.ResourceIdentifier id) { throw null; } public virtual Azure.ResourceManager.StorageCache.StorageTargetResource GetStorageTargetResource(Azure.Core.ResourceIdentifier id) { throw null; } @@ -642,13 +805,18 @@ public static partial class ArmStorageCacheModelFactory public static Azure.ResourceManager.StorageCache.Models.AmlFileSystemHealth AmlFileSystemHealth(Azure.ResourceManager.StorageCache.Models.AmlFileSystemHealthStateType? 
state = default(Azure.ResourceManager.StorageCache.Models.AmlFileSystemHealthStateType?), string statusCode = null, string statusDescription = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.AmlFileSystemPropertiesHsm AmlFileSystemPropertiesHsm(Azure.ResourceManager.StorageCache.Models.AmlFileSystemHsmSettings settings = null, System.Collections.Generic.IEnumerable archiveStatus = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.AmlFileSystemRootSquashSettings AmlFileSystemRootSquashSettings(Azure.ResourceManager.StorageCache.Models.AmlFileSystemSquashMode? mode = default(Azure.ResourceManager.StorageCache.Models.AmlFileSystemSquashMode?), string noSquashNidLists = null, long? squashUID = default(long?), long? squashGID = default(long?), string status = null) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoExportJobData AutoExportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType?), Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus? adminStatus = default(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus?), System.Collections.Generic.IEnumerable autoExportPrefixes = null, Azure.ResourceManager.StorageCache.Models.AutoExportStatusType? state = default(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType?), string statusCode = null, string statusMessage = null, long? totalFilesExported = default(long?), long? totalMiBExported = default(long?), long? totalFilesFailed = default(long?), int? exportIterationCount = default(int?), System.DateTimeOffset? lastSuccessfulIterationCompletionTimeUTC = default(System.DateTimeOffset?), long? currentIterationFilesDiscovered = default(long?), long? currentIterationMiBDiscovered = default(long?), long? currentIterationFilesExported = default(long?), long? currentIterationMiBExported = default(long?), long? currentIterationFilesFailed = default(long?), System.DateTimeOffset? lastStartedTimeUTC = default(System.DateTimeOffset?), System.DateTimeOffset? lastCompletionTimeUTC = default(System.DateTimeOffset?)) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoImportJobData AutoImportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState? provisioningState = default(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState?), Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus? adminStatus = default(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus?), System.Collections.Generic.IEnumerable autoImportPrefixes = null, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? 
conflictResolutionMode = default(Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode?), bool? enableDeletions = default(bool?), long? maximumErrors = default(long?), Azure.ResourceManager.StorageCache.Models.AutoImportJobState? state = default(Azure.ResourceManager.StorageCache.Models.AutoImportJobState?), System.DateTimeOffset? scanStartOn = default(System.DateTimeOffset?), System.DateTimeOffset? scanEndOn = default(System.DateTimeOffset?), long? totalBlobsWalked = default(long?), long? rateOfBlobWalk = default(long?), long? totalBlobsImported = default(long?), long? rateOfBlobImport = default(long?), long? importedFiles = default(long?), long? importedDirectories = default(long?), long? importedSymlinks = default(long?), long? preexistingFiles = default(long?), long? preexistingDirectories = default(long?), long? preexistingSymlinks = default(long?), long? totalErrors = default(long?), long? totalConflicts = default(long?), Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents blobSyncEvents = null, System.DateTimeOffset? lastStartedTimeUTC = default(System.DateTimeOffset?), System.DateTimeOffset? lastCompletionTimeUTC = default(System.DateTimeOffset?)) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents AutoImportJobPropertiesStatusBlobSyncEvents(long? importedFiles = default(long?), long? importedDirectories = default(long?), long? importedSymlinks = default(long?), long? preexistingFiles = default(long?), long? preexistingDirectories = default(long?), long? preexistingSymlinks = default(long?), long? totalBlobsImported = default(long?), long? rateOfBlobImport = default(long?), long? totalErrors = default(long?), long? totalConflicts = default(long?), long? deletions = default(long?), System.DateTimeOffset? lastChangeFeedEventConsumedOn = default(System.DateTimeOffset?), System.DateTimeOffset? lastTimeFullySynchronized = default(System.DateTimeOffset?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.OutstandingCondition OutstandingCondition(System.DateTimeOffset? timestamp = default(System.DateTimeOffset?), string message = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.PrimingJob PrimingJob(string primingJobName = null, System.Uri primingManifestUri = null, string primingJobId = null, Azure.ResourceManager.StorageCache.Models.PrimingJobState? primingJobState = default(Azure.ResourceManager.StorageCache.Models.PrimingJobState?), string primingJobStatus = null, string primingJobDetails = null, double? primingJobPercentComplete = default(double?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.RequiredAmlFileSystemSubnetsSize RequiredAmlFileSystemSubnetsSize(int? filesystemSubnetSize = default(int?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheActiveDirectorySettings StorageCacheActiveDirectorySettings(System.Net.IPAddress primaryDnsIPAddress = null, System.Net.IPAddress secondaryDnsIPAddress = null, string domainName = null, string domainNetBiosName = null, string cacheNetBiosName = null, Azure.ResourceManager.StorageCache.Models.DomainJoinedType? 
domainJoined = default(Azure.ResourceManager.StorageCache.Models.DomainJoinedType?), Azure.ResourceManager.StorageCache.Models.StorageCacheActiveDirectorySettingsCredentials credentials = null) { throw null; } public static Azure.ResourceManager.StorageCache.StorageCacheData StorageCacheData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.Models.ManagedServiceIdentity identity = null, string skuName = null, int? cacheSizeGB = default(int?), Azure.ResourceManager.StorageCache.Models.StorageCacheHealth health = null, System.Collections.Generic.IEnumerable mountAddresses = null, Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType?), Azure.Core.ResourceIdentifier subnet = null, Azure.ResourceManager.StorageCache.Models.StorageCacheUpgradeStatus upgradeStatus = null, Azure.ResourceManager.StorageCache.Models.StorageCacheUpgradeSettings upgradeSettings = null, Azure.ResourceManager.StorageCache.Models.StorageCacheNetworkSettings networkSettings = null, Azure.ResourceManager.StorageCache.Models.StorageCacheEncryptionSettings encryptionSettings = null, System.Collections.Generic.IEnumerable securityAccessPolicies = null, Azure.ResourceManager.StorageCache.Models.StorageCacheDirectorySettings directoryServicesSettings = null, System.Collections.Generic.IEnumerable zones = null, System.Collections.Generic.IEnumerable primingJobs = null, System.Collections.Generic.IEnumerable spaceAllocation = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheHealth StorageCacheHealth(Azure.ResourceManager.StorageCache.Models.StorageCacheHealthStateType? state = default(Azure.ResourceManager.StorageCache.Models.StorageCacheHealthStateType?), string statusDescription = null, System.Collections.Generic.IEnumerable conditions = null) { throw null; } - public static Azure.ResourceManager.StorageCache.StorageCacheImportJobData StorageCacheImportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType?), System.Collections.Generic.IEnumerable importPrefixes = null, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? conflictResolutionMode = default(Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode?), int? maximumErrors = default(int?), Azure.ResourceManager.StorageCache.Models.ImportStatusType? state = default(Azure.ResourceManager.StorageCache.Models.ImportStatusType?), string statusMessage = null, long? totalBlobsWalked = default(long?), long? blobsWalkedPerSecond = default(long?), long? totalBlobsImported = default(long?), long? blobsImportedPerSecond = default(long?), System.DateTimeOffset? lastCompletionOn = default(System.DateTimeOffset?), System.DateTimeOffset? 
lastStartedOn = default(System.DateTimeOffset?), int? totalErrors = default(int?), int? totalConflicts = default(int?)) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.ResourceManager.StorageCache.StorageCacheImportJobData StorageCacheImportJobData(Azure.Core.ResourceIdentifier id, string name, Azure.Core.ResourceType resourceType, Azure.ResourceManager.Models.SystemData systemData, System.Collections.Generic.IDictionary tags, Azure.Core.AzureLocation location, Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? provisioningState, System.Collections.Generic.IEnumerable importPrefixes, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? conflictResolutionMode, int? maximumErrors, Azure.ResourceManager.StorageCache.Models.ImportStatusType? state, string statusMessage, long? totalBlobsWalked, long? blobsWalkedPerSecond, long? totalBlobsImported, long? blobsImportedPerSecond, System.DateTimeOffset? lastCompletionOn, System.DateTimeOffset? lastStartedOn, int? totalErrors, int? totalConflicts) { throw null; } + public static Azure.ResourceManager.StorageCache.StorageCacheImportJobData StorageCacheImportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType?), Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus? adminStatus = default(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus?), System.Collections.Generic.IEnumerable importPrefixes = null, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? conflictResolutionMode = default(Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode?), int? maximumErrors = default(int?), Azure.ResourceManager.StorageCache.Models.ImportStatusType? state = default(Azure.ResourceManager.StorageCache.Models.ImportStatusType?), string statusMessage = null, long? totalBlobsWalked = default(long?), long? blobsWalkedPerSecond = default(long?), long? totalBlobsImported = default(long?), long? importedFiles = default(long?), long? importedDirectories = default(long?), long? importedSymlinks = default(long?), long? preexistingFiles = default(long?), long? preexistingDirectories = default(long?), long? preexistingSymlinks = default(long?), long? blobsImportedPerSecond = default(long?), System.DateTimeOffset? lastCompletionOn = default(System.DateTimeOffset?), System.DateTimeOffset? lastStartedOn = default(System.DateTimeOffset?), int? totalErrors = default(int?), int? totalConflicts = default(int?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheNetworkSettings StorageCacheNetworkSettings(int? 
mtu = default(int?), System.Collections.Generic.IEnumerable utilityAddresses = null, System.Collections.Generic.IEnumerable dnsServers = null, string dnsSearchDomain = null, string ntpServer = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheRestriction StorageCacheRestriction(string restrictionType = null, System.Collections.Generic.IEnumerable values = null, Azure.ResourceManager.StorageCache.Models.StorageCacheRestrictionReasonCode? reasonCode = default(Azure.ResourceManager.StorageCache.Models.StorageCacheRestrictionReasonCode?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheSku StorageCacheSku(string resourceType = null, System.Collections.Generic.IEnumerable capabilities = null, System.Collections.Generic.IEnumerable locations = null, System.Collections.Generic.IEnumerable locationInfo = null, string name = null, System.Collections.Generic.IEnumerable restrictions = null) { throw null; } @@ -661,6 +829,192 @@ public static partial class ArmStorageCacheModelFactory public static Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadSettings StorageCacheUsernameDownloadSettings(bool? enableExtendedGroups = default(bool?), Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameSourceType? usernameSource = default(Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameSourceType?), System.Uri groupFileUri = null, System.Uri userFileUri = null, string ldapServer = null, string ldapBaseDN = null, bool? encryptLdapConnection = default(bool?), bool? requireValidCertificate = default(bool?), bool? autoDownloadCertificate = default(bool?), System.Uri caCertificateUri = null, Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadedType? usernameDownloaded = default(Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadedType?), Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadCredential credentials = null) { throw null; } public static Azure.ResourceManager.StorageCache.StorageTargetData StorageTargetData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IEnumerable junctions = null, Azure.ResourceManager.StorageCache.Models.StorageTargetType? targetType = default(Azure.ResourceManager.StorageCache.Models.StorageTargetType?), Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType?), Azure.ResourceManager.StorageCache.Models.StorageTargetOperationalStateType? state = default(Azure.ResourceManager.StorageCache.Models.StorageTargetOperationalStateType?), Azure.ResourceManager.StorageCache.Models.Nfs3Target nfs3 = null, Azure.Core.ResourceIdentifier clfsTarget = null, System.Collections.Generic.IDictionary unknownAttributes = null, Azure.ResourceManager.StorageCache.Models.BlobNfsTarget blobNfs = null, int? allocationPercentage = default(int?), Azure.Core.AzureLocation? 
location = default(Azure.Core.AzureLocation?)) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoExportJobAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoExportJobAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus Disable { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus Enable { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } + public partial class AutoExportJobPatch : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoExportJobPatch() { } + public Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus? 
AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Tags { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoExportJobProvisioningStateType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoExportJobProvisioningStateType(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Canceled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Creating { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Deleting { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Succeeded { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Updating { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType left, Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType left, Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoExportStatusType : 
System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoExportStatusType(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType Disabled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType DisableFailed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType Disabling { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType InProgress { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType left, Azure.ResourceManager.StorageCache.Models.AutoExportStatusType right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoExportStatusType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType left, Azure.ResourceManager.StorageCache.Models.AutoExportStatusType right) { throw null; } + public override string ToString() { throw null; } + } + public partial class AutoImportJobPatch : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoImportJobPatch() { } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus? 
AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Tags { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobPropertiesAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobPropertiesAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus Disable { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus Enable { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobPropertiesProvisioningState : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobPropertiesProvisioningState(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Canceled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Creating { get { throw null; } } + public static 
Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Deleting { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Succeeded { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Updating { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState right) { throw null; } + public override string ToString() { throw null; } + } + public partial class AutoImportJobPropertiesStatusBlobSyncEvents : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AutoImportJobPropertiesStatusBlobSyncEvents() { } + public long? Deletions { get { throw null; } } + public long? ImportedDirectories { get { throw null; } } + public long? ImportedFiles { get { throw null; } } + public long? ImportedSymlinks { get { throw null; } } + public System.DateTimeOffset? LastChangeFeedEventConsumedOn { get { throw null; } } + public System.DateTimeOffset? LastTimeFullySynchronized { get { throw null; } } + public long? PreexistingDirectories { get { throw null; } } + public long? PreexistingFiles { get { throw null; } } + public long? PreexistingSymlinks { get { throw null; } } + public long? RateOfBlobImport { get { throw null; } } + public long? TotalBlobsImported { get { throw null; } } + public long? TotalConflicts { get { throw null; } } + public long? 
TotalErrors { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobState : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobState(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState Disabled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState Disabling { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState InProgress { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobState other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobState right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobState (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobState right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobUpdatePropertiesAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobUpdatePropertiesAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus Disable { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus Enable { get { throw null; } } + public bool 
Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } public partial class BlobNfsTarget : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public BlobNfsTarget() { } @@ -715,6 +1069,24 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public override string ToString() { throw null; } } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ImportJobAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ImportJobAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus Active { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus Cancel { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct ImportJobProvisioningStateType : System.IEquatable { private readonly object _dummy; @@ -1076,6 +1448,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public partial class StorageCacheImportJobPatch : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public StorageCacheImportJobPatch() { } + public Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus? 
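// The new ImportJobAdminStatus enum and the AdminStatus setter added to StorageCacheImportJobPatch above
// allow an in-flight import job to be cancelled. A minimal sketch, assuming an existing
// StorageCacheImportJobResource instance `importJob` and its existing Update overload that accepts this
// patch type (that overload is not part of the changes shown here):
var importJobPatch = new Azure.ResourceManager.StorageCache.Models.StorageCacheImportJobPatch
{
    AdminStatus = Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus.Cancel,
};
await importJob.UpdateAsync(Azure.WaitUntil.Completed, importJobPatch);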
AdminStatus { get { throw null; } set { } } public System.Collections.Generic.IDictionary Tags { get { throw null; } } protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.StorageCache.Models.StorageCacheImportJobPatch System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.netstandard2.0.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.netstandard2.0.cs index 63ba583f0b9d..461ddf8ddaac 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.netstandard2.0.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/api/Azure.ResourceManager.StorageCache.netstandard2.0.cs @@ -57,6 +57,12 @@ protected AmlFileSystemResource() { } public virtual System.Threading.Tasks.Task DeleteAsync(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response Get(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task> GetAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetAutoExportJob(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAutoExportJobAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.ResourceManager.StorageCache.AutoExportJobCollection GetAutoExportJobs() { throw null; } + public virtual Azure.Response GetAutoImportJob(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAutoImportJobAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.ResourceManager.StorageCache.AutoImportJobCollection GetAutoImportJobs() { throw null; } public virtual Azure.Response GetStorageCacheImportJob(string importJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task> GetStorageCacheImportJobAsync(string importJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.ResourceManager.StorageCache.StorageCacheImportJobCollection GetStorageCacheImportJobs() { throw null; } @@ -72,6 +78,152 @@ protected AmlFileSystemResource() { } public virtual Azure.ResourceManager.ArmOperation Update(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AmlFileSystemPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task> UpdateAsync(Azure.WaitUntil waitUntil, 
Azure.ResourceManager.StorageCache.Models.AmlFileSystemPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } } + public partial class AutoExportJobCollection : Azure.ResourceManager.ArmCollection, System.Collections.Generic.IAsyncEnumerable, System.Collections.Generic.IEnumerable, System.Collections.IEnumerable + { + protected AutoExportJobCollection() { } + public virtual Azure.ResourceManager.ArmOperation CreateOrUpdate(Azure.WaitUntil waitUntil, string autoExportJobName, Azure.ResourceManager.StorageCache.AutoExportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> CreateOrUpdateAsync(Azure.WaitUntil waitUntil, string autoExportJobName, Azure.ResourceManager.StorageCache.AutoExportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Exists(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> ExistsAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Get(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Pageable GetAll(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.AsyncPageable GetAllAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.NullableResponse GetIfExists(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetIfExistsAsync(string autoExportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + System.Collections.Generic.IAsyncEnumerator System.Collections.Generic.IAsyncEnumerable.GetAsyncEnumerator(System.Threading.CancellationToken cancellationToken) { throw null; } + System.Collections.Generic.IEnumerator System.Collections.Generic.IEnumerable.GetEnumerator() { throw null; } + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { throw null; } + } + public partial class AutoExportJobData : Azure.ResourceManager.Models.TrackedResourceData, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoExportJobData(Azure.Core.AzureLocation location) { } + public Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus? AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IList AutoExportPrefixes { get { throw null; } } + public long? CurrentIterationFilesDiscovered { get { throw null; } } + public long? CurrentIterationFilesExported { get { throw null; } } + public long? 
CurrentIterationFilesFailed { get { throw null; } } + public long? CurrentIterationMiBDiscovered { get { throw null; } } + public long? CurrentIterationMiBExported { get { throw null; } } + public int? ExportIterationCount { get { throw null; } } + public System.DateTimeOffset? LastCompletionTimeUTC { get { throw null; } } + public System.DateTimeOffset? LastStartedTimeUTC { get { throw null; } } + public System.DateTimeOffset? LastSuccessfulIterationCompletionTimeUTC { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType? ProvisioningState { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoExportStatusType? State { get { throw null; } set { } } + public string StatusCode { get { throw null; } } + public string StatusMessage { get { throw null; } } + public long? TotalFilesExported { get { throw null; } } + public long? TotalFilesFailed { get { throw null; } } + public long? TotalMiBExported { get { throw null; } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AutoExportJobResource : Azure.ResourceManager.ArmResource, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public static readonly Azure.Core.ResourceType ResourceType; + protected AutoExportJobResource() { } + public virtual Azure.ResourceManager.StorageCache.AutoExportJobData Data { get { throw null; } } + public virtual bool HasData { get { throw null; } } + public virtual Azure.Response AddTag(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> AddTagAsync(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static Azure.Core.ResourceIdentifier CreateResourceIdentifier(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Delete(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DeleteAsync(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response 
Get(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response RemoveTag(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> RemoveTagAsync(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response SetTags(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> SetTagsAsync(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoExportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Update(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> UpdateAsync(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } + public partial class AutoImportJobCollection : Azure.ResourceManager.ArmCollection, System.Collections.Generic.IAsyncEnumerable, System.Collections.Generic.IEnumerable, System.Collections.IEnumerable + { + protected AutoImportJobCollection() { } + public virtual Azure.ResourceManager.ArmOperation CreateOrUpdate(Azure.WaitUntil waitUntil, string autoImportJobName, Azure.ResourceManager.StorageCache.AutoImportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> CreateOrUpdateAsync(Azure.WaitUntil waitUntil, string autoImportJobName, Azure.ResourceManager.StorageCache.AutoImportJobData data, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Exists(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + 
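// A minimal sketch of the new AutoExportJobCollection / AutoExportJobData surface listed above, assuming
// an existing AmlFileSystemResource instance `amlFileSystem`; the job name, location and export prefix are
// placeholders, and generic type arguments elided by this listing (e.g. ArmOperation<...>) are inferred:
var exportJobData = new Azure.ResourceManager.StorageCache.AutoExportJobData(Azure.Core.AzureLocation.EastUS)
{
    AdminStatus = Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus.Enable,
};
exportJobData.AutoExportPrefixes.Add("/"); // AutoExportPrefixes is assumed to hold string path prefixes
Azure.ResourceManager.ArmOperation<Azure.ResourceManager.StorageCache.AutoExportJobResource> createOperation =
    await amlFileSystem.GetAutoExportJobs().CreateOrUpdateAsync(Azure.WaitUntil.Completed, "autoExportJob1", exportJobData);
Azure.ResourceManager.StorageCache.AutoExportJobResource exportJob = createOperation.Value;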
public virtual System.Threading.Tasks.Task> ExistsAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Get(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Pageable GetAll(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.AsyncPageable GetAllAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.NullableResponse GetIfExists(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetIfExistsAsync(string autoImportJobName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + System.Collections.Generic.IAsyncEnumerator System.Collections.Generic.IAsyncEnumerable.GetAsyncEnumerator(System.Threading.CancellationToken cancellationToken) { throw null; } + System.Collections.Generic.IEnumerator System.Collections.Generic.IEnumerable.GetEnumerator() { throw null; } + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { throw null; } + } + public partial class AutoImportJobData : Azure.ResourceManager.Models.TrackedResourceData, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoImportJobData(Azure.Core.AzureLocation location) { } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus? AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IList AutoImportPrefixes { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents BlobSyncEvents { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? ConflictResolutionMode { get { throw null; } set { } } + public bool? EnableDeletions { get { throw null; } set { } } + public long? ImportedDirectories { get { throw null; } } + public long? ImportedFiles { get { throw null; } } + public long? ImportedSymlinks { get { throw null; } } + public System.DateTimeOffset? LastCompletionTimeUTC { get { throw null; } } + public System.DateTimeOffset? LastStartedTimeUTC { get { throw null; } } + public long? MaximumErrors { get { throw null; } set { } } + public long? PreexistingDirectories { get { throw null; } } + public long? PreexistingFiles { get { throw null; } } + public long? PreexistingSymlinks { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState? ProvisioningState { get { throw null; } } + public long? RateOfBlobImport { get { throw null; } } + public long? RateOfBlobWalk { get { throw null; } } + public System.DateTimeOffset? ScanEndOn { get { throw null; } } + public System.DateTimeOffset? ScanStartOn { get { throw null; } } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobState? State { get { throw null; } } + public long? 
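// A sketch of enumerating the new AutoImportJob resources and reading their read-only status counters,
// again assuming an existing AmlFileSystemResource instance `amlFileSystem`:
await foreach (Azure.ResourceManager.StorageCache.AutoImportJobResource autoImportJob in
    amlFileSystem.GetAutoImportJobs().GetAllAsync())
{
    Azure.ResourceManager.StorageCache.AutoImportJobData jobData = autoImportJob.Data;
    System.Console.WriteLine($"{jobData.Name}: state={jobData.State}, blobsImported={jobData.TotalBlobsImported}, errors={jobData.TotalErrors}");
}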
TotalBlobsImported { get { throw null; } } + public long? TotalBlobsWalked { get { throw null; } } + public long? TotalConflicts { get { throw null; } } + public long? TotalErrors { get { throw null; } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AutoImportJobResource : Azure.ResourceManager.ArmResource, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public static readonly Azure.Core.ResourceType ResourceType; + protected AutoImportJobResource() { } + public virtual Azure.ResourceManager.StorageCache.AutoImportJobData Data { get { throw null; } } + public virtual bool HasData { get { throw null; } } + public virtual Azure.Response AddTag(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> AddTagAsync(string key, string value, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static Azure.Core.ResourceIdentifier CreateResourceIdentifier(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Delete(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DeleteAsync(Azure.WaitUntil waitUntil, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Get(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response RemoveTag(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> RemoveTagAsync(string key, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response SetTags(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw 
null; } + public virtual System.Threading.Tasks.Task> SetTagsAsync(System.Collections.Generic.IDictionary tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.AutoImportJobData System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + public virtual Azure.ResourceManager.ArmOperation Update(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> UpdateAsync(Azure.WaitUntil waitUntil, Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch patch, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } public partial class StorageCacheCollection : Azure.ResourceManager.ArmCollection, System.Collections.Generic.IAsyncEnumerable, System.Collections.Generic.IEnumerable, System.Collections.IEnumerable { protected StorageCacheCollection() { } @@ -125,6 +277,8 @@ public static partial class StorageCacheExtensions public static Azure.ResourceManager.StorageCache.AmlFileSystemCollection GetAmlFileSystems(this Azure.ResourceManager.Resources.ResourceGroupResource resourceGroupResource) { throw null; } public static Azure.Pageable GetAmlFileSystems(this Azure.ResourceManager.Resources.SubscriptionResource subscriptionResource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static Azure.AsyncPageable GetAmlFileSystemsAsync(this Azure.ResourceManager.Resources.SubscriptionResource subscriptionResource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoExportJobResource GetAutoExportJobResource(this Azure.ResourceManager.ArmClient client, Azure.Core.ResourceIdentifier id) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoImportJobResource GetAutoImportJobResource(this Azure.ResourceManager.ArmClient client, Azure.Core.ResourceIdentifier id) { throw null; } public static Azure.Response GetRequiredAmlFSSubnetsSize(this Azure.ResourceManager.Resources.SubscriptionResource subscriptionResource, Azure.ResourceManager.StorageCache.Models.RequiredAmlFileSystemSubnetsSizeContent content = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static System.Threading.Tasks.Task> GetRequiredAmlFSSubnetsSizeAsync(this Azure.ResourceManager.Resources.SubscriptionResource 
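// The GetAutoExportJobResource / GetAutoImportJobResource extensions above follow the usual ArmClient
// pattern of rehydrating a resource from its identifier. A sketch, assuming Azure.Identity's
// DefaultAzureCredential for authentication; the identifier segments are placeholders:
var armClient = new Azure.ResourceManager.ArmClient(new Azure.Identity.DefaultAzureCredential());
Azure.Core.ResourceIdentifier autoImportJobId =
    Azure.ResourceManager.StorageCache.AutoImportJobResource.CreateResourceIdentifier(
        "00000000-0000-0000-0000-000000000000", "myResourceGroup", "myAmlFileSystem", "myAutoImportJob");
Azure.ResourceManager.StorageCache.AutoImportJobResource rehydratedJob =
    await armClient.GetAutoImportJobResource(autoImportJobId).GetAsync();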
subscriptionResource, Azure.ResourceManager.StorageCache.Models.RequiredAmlFileSystemSubnetsSizeContent content = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static Azure.Response GetStorageCache(this Azure.ResourceManager.Resources.ResourceGroupResource resourceGroupResource, string cacheName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } @@ -162,13 +316,20 @@ protected StorageCacheImportJobCollection() { } public partial class StorageCacheImportJobData : Azure.ResourceManager.Models.TrackedResourceData, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public StorageCacheImportJobData(Azure.Core.AzureLocation location) { } + public Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus? AdminStatus { get { throw null; } set { } } public long? BlobsImportedPerSecond { get { throw null; } } public long? BlobsWalkedPerSecond { get { throw null; } } public Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? ConflictResolutionMode { get { throw null; } set { } } + public long? ImportedDirectories { get { throw null; } } + public long? ImportedFiles { get { throw null; } } + public long? ImportedSymlinks { get { throw null; } } public System.Collections.Generic.IList ImportPrefixes { get { throw null; } } public System.DateTimeOffset? LastCompletionOn { get { throw null; } } public System.DateTimeOffset? LastStartedOn { get { throw null; } } public int? MaximumErrors { get { throw null; } set { } } + public long? PreexistingDirectories { get { throw null; } } + public long? PreexistingFiles { get { throw null; } } + public long? PreexistingSymlinks { get { throw null; } } public Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? ProvisioningState { get { throw null; } } public Azure.ResourceManager.StorageCache.Models.ImportStatusType? State { get { throw null; } } public string StatusMessage { get { throw null; } } @@ -337,6 +498,8 @@ public partial class MockableStorageCacheArmClient : Azure.ResourceManager.ArmRe { protected MockableStorageCacheArmClient() { } public virtual Azure.ResourceManager.StorageCache.AmlFileSystemResource GetAmlFileSystemResource(Azure.Core.ResourceIdentifier id) { throw null; } + public virtual Azure.ResourceManager.StorageCache.AutoExportJobResource GetAutoExportJobResource(Azure.Core.ResourceIdentifier id) { throw null; } + public virtual Azure.ResourceManager.StorageCache.AutoImportJobResource GetAutoImportJobResource(Azure.Core.ResourceIdentifier id) { throw null; } public virtual Azure.ResourceManager.StorageCache.StorageCacheImportJobResource GetStorageCacheImportJobResource(Azure.Core.ResourceIdentifier id) { throw null; } public virtual Azure.ResourceManager.StorageCache.StorageCacheResource GetStorageCacheResource(Azure.Core.ResourceIdentifier id) { throw null; } public virtual Azure.ResourceManager.StorageCache.StorageTargetResource GetStorageTargetResource(Azure.Core.ResourceIdentifier id) { throw null; } @@ -642,13 +805,18 @@ public static partial class ArmStorageCacheModelFactory public static Azure.ResourceManager.StorageCache.Models.AmlFileSystemHealth AmlFileSystemHealth(Azure.ResourceManager.StorageCache.Models.AmlFileSystemHealthStateType? 
state = default(Azure.ResourceManager.StorageCache.Models.AmlFileSystemHealthStateType?), string statusCode = null, string statusDescription = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.AmlFileSystemPropertiesHsm AmlFileSystemPropertiesHsm(Azure.ResourceManager.StorageCache.Models.AmlFileSystemHsmSettings settings = null, System.Collections.Generic.IEnumerable archiveStatus = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.AmlFileSystemRootSquashSettings AmlFileSystemRootSquashSettings(Azure.ResourceManager.StorageCache.Models.AmlFileSystemSquashMode? mode = default(Azure.ResourceManager.StorageCache.Models.AmlFileSystemSquashMode?), string noSquashNidLists = null, long? squashUID = default(long?), long? squashGID = default(long?), string status = null) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoExportJobData AutoExportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType?), Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus? adminStatus = default(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus?), System.Collections.Generic.IEnumerable autoExportPrefixes = null, Azure.ResourceManager.StorageCache.Models.AutoExportStatusType? state = default(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType?), string statusCode = null, string statusMessage = null, long? totalFilesExported = default(long?), long? totalMiBExported = default(long?), long? totalFilesFailed = default(long?), int? exportIterationCount = default(int?), System.DateTimeOffset? lastSuccessfulIterationCompletionTimeUTC = default(System.DateTimeOffset?), long? currentIterationFilesDiscovered = default(long?), long? currentIterationMiBDiscovered = default(long?), long? currentIterationFilesExported = default(long?), long? currentIterationMiBExported = default(long?), long? currentIterationFilesFailed = default(long?), System.DateTimeOffset? lastStartedTimeUTC = default(System.DateTimeOffset?), System.DateTimeOffset? lastCompletionTimeUTC = default(System.DateTimeOffset?)) { throw null; } + public static Azure.ResourceManager.StorageCache.AutoImportJobData AutoImportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState? provisioningState = default(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState?), Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus? adminStatus = default(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus?), System.Collections.Generic.IEnumerable autoImportPrefixes = null, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? 
conflictResolutionMode = default(Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode?), bool? enableDeletions = default(bool?), long? maximumErrors = default(long?), Azure.ResourceManager.StorageCache.Models.AutoImportJobState? state = default(Azure.ResourceManager.StorageCache.Models.AutoImportJobState?), System.DateTimeOffset? scanStartOn = default(System.DateTimeOffset?), System.DateTimeOffset? scanEndOn = default(System.DateTimeOffset?), long? totalBlobsWalked = default(long?), long? rateOfBlobWalk = default(long?), long? totalBlobsImported = default(long?), long? rateOfBlobImport = default(long?), long? importedFiles = default(long?), long? importedDirectories = default(long?), long? importedSymlinks = default(long?), long? preexistingFiles = default(long?), long? preexistingDirectories = default(long?), long? preexistingSymlinks = default(long?), long? totalErrors = default(long?), long? totalConflicts = default(long?), Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents blobSyncEvents = null, System.DateTimeOffset? lastStartedTimeUTC = default(System.DateTimeOffset?), System.DateTimeOffset? lastCompletionTimeUTC = default(System.DateTimeOffset?)) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents AutoImportJobPropertiesStatusBlobSyncEvents(long? importedFiles = default(long?), long? importedDirectories = default(long?), long? importedSymlinks = default(long?), long? preexistingFiles = default(long?), long? preexistingDirectories = default(long?), long? preexistingSymlinks = default(long?), long? totalBlobsImported = default(long?), long? rateOfBlobImport = default(long?), long? totalErrors = default(long?), long? totalConflicts = default(long?), long? deletions = default(long?), System.DateTimeOffset? lastChangeFeedEventConsumedOn = default(System.DateTimeOffset?), System.DateTimeOffset? lastTimeFullySynchronized = default(System.DateTimeOffset?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.OutstandingCondition OutstandingCondition(System.DateTimeOffset? timestamp = default(System.DateTimeOffset?), string message = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.PrimingJob PrimingJob(string primingJobName = null, System.Uri primingManifestUri = null, string primingJobId = null, Azure.ResourceManager.StorageCache.Models.PrimingJobState? primingJobState = default(Azure.ResourceManager.StorageCache.Models.PrimingJobState?), string primingJobStatus = null, string primingJobDetails = null, double? primingJobPercentComplete = default(double?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.RequiredAmlFileSystemSubnetsSize RequiredAmlFileSystemSubnetsSize(int? filesystemSubnetSize = default(int?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheActiveDirectorySettings StorageCacheActiveDirectorySettings(System.Net.IPAddress primaryDnsIPAddress = null, System.Net.IPAddress secondaryDnsIPAddress = null, string domainName = null, string domainNetBiosName = null, string cacheNetBiosName = null, Azure.ResourceManager.StorageCache.Models.DomainJoinedType? 
domainJoined = default(Azure.ResourceManager.StorageCache.Models.DomainJoinedType?), Azure.ResourceManager.StorageCache.Models.StorageCacheActiveDirectorySettingsCredentials credentials = null) { throw null; } public static Azure.ResourceManager.StorageCache.StorageCacheData StorageCacheData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.Models.ManagedServiceIdentity identity = null, string skuName = null, int? cacheSizeGB = default(int?), Azure.ResourceManager.StorageCache.Models.StorageCacheHealth health = null, System.Collections.Generic.IEnumerable mountAddresses = null, Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType?), Azure.Core.ResourceIdentifier subnet = null, Azure.ResourceManager.StorageCache.Models.StorageCacheUpgradeStatus upgradeStatus = null, Azure.ResourceManager.StorageCache.Models.StorageCacheUpgradeSettings upgradeSettings = null, Azure.ResourceManager.StorageCache.Models.StorageCacheNetworkSettings networkSettings = null, Azure.ResourceManager.StorageCache.Models.StorageCacheEncryptionSettings encryptionSettings = null, System.Collections.Generic.IEnumerable securityAccessPolicies = null, Azure.ResourceManager.StorageCache.Models.StorageCacheDirectorySettings directoryServicesSettings = null, System.Collections.Generic.IEnumerable zones = null, System.Collections.Generic.IEnumerable primingJobs = null, System.Collections.Generic.IEnumerable spaceAllocation = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheHealth StorageCacheHealth(Azure.ResourceManager.StorageCache.Models.StorageCacheHealthStateType? state = default(Azure.ResourceManager.StorageCache.Models.StorageCacheHealthStateType?), string statusDescription = null, System.Collections.Generic.IEnumerable conditions = null) { throw null; } - public static Azure.ResourceManager.StorageCache.StorageCacheImportJobData StorageCacheImportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType?), System.Collections.Generic.IEnumerable importPrefixes = null, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? conflictResolutionMode = default(Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode?), int? maximumErrors = default(int?), Azure.ResourceManager.StorageCache.Models.ImportStatusType? state = default(Azure.ResourceManager.StorageCache.Models.ImportStatusType?), string statusMessage = null, long? totalBlobsWalked = default(long?), long? blobsWalkedPerSecond = default(long?), long? totalBlobsImported = default(long?), long? blobsImportedPerSecond = default(long?), System.DateTimeOffset? lastCompletionOn = default(System.DateTimeOffset?), System.DateTimeOffset? 
lastStartedOn = default(System.DateTimeOffset?), int? totalErrors = default(int?), int? totalConflicts = default(int?)) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.ResourceManager.StorageCache.StorageCacheImportJobData StorageCacheImportJobData(Azure.Core.ResourceIdentifier id, string name, Azure.Core.ResourceType resourceType, Azure.ResourceManager.Models.SystemData systemData, System.Collections.Generic.IDictionary tags, Azure.Core.AzureLocation location, Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? provisioningState, System.Collections.Generic.IEnumerable importPrefixes, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? conflictResolutionMode, int? maximumErrors, Azure.ResourceManager.StorageCache.Models.ImportStatusType? state, string statusMessage, long? totalBlobsWalked, long? blobsWalkedPerSecond, long? totalBlobsImported, long? blobsImportedPerSecond, System.DateTimeOffset? lastCompletionOn, System.DateTimeOffset? lastStartedOn, int? totalErrors, int? totalConflicts) { throw null; } + public static Azure.ResourceManager.StorageCache.StorageCacheImportJobData StorageCacheImportJobData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IDictionary tags = null, Azure.Core.AzureLocation location = default(Azure.Core.AzureLocation), Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.ImportJobProvisioningStateType?), Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus? adminStatus = default(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus?), System.Collections.Generic.IEnumerable importPrefixes = null, Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode? conflictResolutionMode = default(Azure.ResourceManager.StorageCache.Models.ConflictResolutionMode?), int? maximumErrors = default(int?), Azure.ResourceManager.StorageCache.Models.ImportStatusType? state = default(Azure.ResourceManager.StorageCache.Models.ImportStatusType?), string statusMessage = null, long? totalBlobsWalked = default(long?), long? blobsWalkedPerSecond = default(long?), long? totalBlobsImported = default(long?), long? importedFiles = default(long?), long? importedDirectories = default(long?), long? importedSymlinks = default(long?), long? preexistingFiles = default(long?), long? preexistingDirectories = default(long?), long? preexistingSymlinks = default(long?), long? blobsImportedPerSecond = default(long?), System.DateTimeOffset? lastCompletionOn = default(System.DateTimeOffset?), System.DateTimeOffset? lastStartedOn = default(System.DateTimeOffset?), int? totalErrors = default(int?), int? totalConflicts = default(int?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheNetworkSettings StorageCacheNetworkSettings(int? 
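// The new StorageCacheImportJobData model-factory overload above (the previous shape is kept but marked
// EditorBrowsable(Never)) is intended for building read-only models in tests. A sketch using a few of the
// optional parameters, assuming the factory sits in the Models namespace as is conventional:
Azure.ResourceManager.StorageCache.StorageCacheImportJobData fakeImportJob =
    Azure.ResourceManager.StorageCache.Models.ArmStorageCacheModelFactory.StorageCacheImportJobData(
        name: "importJob1",
        adminStatus: Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus.Active,
        totalBlobsImported: 1000,
        importedFiles: 950,
        totalErrors: 0);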
mtu = default(int?), System.Collections.Generic.IEnumerable utilityAddresses = null, System.Collections.Generic.IEnumerable dnsServers = null, string dnsSearchDomain = null, string ntpServer = null) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheRestriction StorageCacheRestriction(string restrictionType = null, System.Collections.Generic.IEnumerable values = null, Azure.ResourceManager.StorageCache.Models.StorageCacheRestrictionReasonCode? reasonCode = default(Azure.ResourceManager.StorageCache.Models.StorageCacheRestrictionReasonCode?)) { throw null; } public static Azure.ResourceManager.StorageCache.Models.StorageCacheSku StorageCacheSku(string resourceType = null, System.Collections.Generic.IEnumerable capabilities = null, System.Collections.Generic.IEnumerable locations = null, System.Collections.Generic.IEnumerable locationInfo = null, string name = null, System.Collections.Generic.IEnumerable restrictions = null) { throw null; } @@ -661,6 +829,192 @@ public static partial class ArmStorageCacheModelFactory public static Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadSettings StorageCacheUsernameDownloadSettings(bool? enableExtendedGroups = default(bool?), Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameSourceType? usernameSource = default(Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameSourceType?), System.Uri groupFileUri = null, System.Uri userFileUri = null, string ldapServer = null, string ldapBaseDN = null, bool? encryptLdapConnection = default(bool?), bool? requireValidCertificate = default(bool?), bool? autoDownloadCertificate = default(bool?), System.Uri caCertificateUri = null, Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadedType? usernameDownloaded = default(Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadedType?), Azure.ResourceManager.StorageCache.Models.StorageCacheUsernameDownloadCredential credentials = null) { throw null; } public static Azure.ResourceManager.StorageCache.StorageTargetData StorageTargetData(Azure.Core.ResourceIdentifier id = null, string name = null, Azure.Core.ResourceType resourceType = default(Azure.Core.ResourceType), Azure.ResourceManager.Models.SystemData systemData = null, System.Collections.Generic.IEnumerable junctions = null, Azure.ResourceManager.StorageCache.Models.StorageTargetType? targetType = default(Azure.ResourceManager.StorageCache.Models.StorageTargetType?), Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType? provisioningState = default(Azure.ResourceManager.StorageCache.Models.StorageCacheProvisioningStateType?), Azure.ResourceManager.StorageCache.Models.StorageTargetOperationalStateType? state = default(Azure.ResourceManager.StorageCache.Models.StorageTargetOperationalStateType?), Azure.ResourceManager.StorageCache.Models.Nfs3Target nfs3 = null, Azure.Core.ResourceIdentifier clfsTarget = null, System.Collections.Generic.IDictionary unknownAttributes = null, Azure.ResourceManager.StorageCache.Models.BlobNfsTarget blobNfs = null, int? allocationPercentage = default(int?), Azure.Core.AzureLocation? 
location = default(Azure.Core.AzureLocation?)) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoExportJobAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoExportJobAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus Disable { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus Enable { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } + public partial class AutoExportJobPatch : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoExportJobPatch() { } + public Azure.ResourceManager.StorageCache.Models.AutoExportJobAdminStatus? 
AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Tags { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoExportJobPatch System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoExportJobProvisioningStateType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoExportJobProvisioningStateType(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Canceled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Creating { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Deleting { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Succeeded { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType Updating { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType left, Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType left, Azure.ResourceManager.StorageCache.Models.AutoExportJobProvisioningStateType right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoExportStatusType : 
System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoExportStatusType(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType Disabled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType DisableFailed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType Disabling { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoExportStatusType InProgress { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType left, Azure.ResourceManager.StorageCache.Models.AutoExportStatusType right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoExportStatusType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoExportStatusType left, Azure.ResourceManager.StorageCache.Models.AutoExportStatusType right) { throw null; } + public override string ToString() { throw null; } + } + public partial class AutoImportJobPatch : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AutoImportJobPatch() { } + public Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus? 
AdminStatus { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Tags { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPatch System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobPropertiesAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobPropertiesAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus Disable { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus Enable { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobPropertiesProvisioningState : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobPropertiesProvisioningState(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Canceled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Creating { get { throw null; } } + public static 
Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Deleting { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Succeeded { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState Updating { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesProvisioningState right) { throw null; } + public override string ToString() { throw null; } + } + public partial class AutoImportJobPropertiesStatusBlobSyncEvents : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AutoImportJobPropertiesStatusBlobSyncEvents() { } + public long? Deletions { get { throw null; } } + public long? ImportedDirectories { get { throw null; } } + public long? ImportedFiles { get { throw null; } } + public long? ImportedSymlinks { get { throw null; } } + public System.DateTimeOffset? LastChangeFeedEventConsumedOn { get { throw null; } } + public System.DateTimeOffset? LastTimeFullySynchronized { get { throw null; } } + public long? PreexistingDirectories { get { throw null; } } + public long? PreexistingFiles { get { throw null; } } + public long? PreexistingSymlinks { get { throw null; } } + public long? RateOfBlobImport { get { throw null; } } + public long? TotalBlobsImported { get { throw null; } } + public long? TotalConflicts { get { throw null; } } + public long? 
TotalErrors { get { throw null; } } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.StorageCache.Models.AutoImportJobPropertiesStatusBlobSyncEvents System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobState : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobState(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState Disabled { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState Disabling { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState Failed { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobState InProgress { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobState other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobState right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobState (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobState left, Azure.ResourceManager.StorageCache.Models.AutoImportJobState right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AutoImportJobUpdatePropertiesAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AutoImportJobUpdatePropertiesAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus Disable { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus Enable { get { throw null; } } + public bool 
Equals(Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus left, Azure.ResourceManager.StorageCache.Models.AutoImportJobUpdatePropertiesAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } public partial class BlobNfsTarget : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public BlobNfsTarget() { } @@ -715,6 +1069,24 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public override string ToString() { throw null; } } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ImportJobAdminStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ImportJobAdminStatus(string value) { throw null; } + public static Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus Active { get { throw null; } } + public static Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus Cancel { get { throw null; } } + public bool Equals(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus right) { throw null; } + public static implicit operator Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus left, Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct ImportJobProvisioningStateType : System.IEquatable { private readonly object _dummy; @@ -1076,6 +1448,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer public partial class StorageCacheImportJobPatch : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public StorageCacheImportJobPatch() { } + public Azure.ResourceManager.StorageCache.Models.ImportJobAdminStatus? 
AdminStatus { get { throw null; } set { } } public System.Collections.Generic.IDictionary Tags { get { throw null; } } protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.StorageCache.Models.StorageCacheImportJobPatch System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemCollection.cs index a6ede17cef96..21d4741013bf 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemCollection.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemCollection.cs @@ -23,7 +23,7 @@ public partial class Sample_AmlFileSystemCollection [Ignore("Only validating compilation of examples")] public async Task CreateOrUpdate_AmlFilesystemsCreateOrUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_CreateOrUpdate.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_CreateOrUpdate.json // this example is just showing the usage of "amlFilesystems_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -98,7 +98,7 @@ public async Task CreateOrUpdate_AmlFilesystemsCreateOrUpdate() [Ignore("Only validating compilation of examples")] public async Task Get_AmlFilesystemsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_Get.json // this example is just showing the usage of "amlFilesystems_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -131,7 +131,7 @@ public async Task Get_AmlFilesystemsGet() [Ignore("Only validating compilation of examples")] public async Task GetAll_AmlFilesystemsListByResourceGroup() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_ListByResourceGroup.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_ListByResourceGroup.json // this example is just showing the usage of "amlFilesystems_ListByResourceGroup" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -166,7 +166,7 @@ public async Task GetAll_AmlFilesystemsListByResourceGroup() [Ignore("Only validating compilation of examples")] public async Task Exists_AmlFilesystemsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_Get.json // this example is just showing the usage of "amlFilesystems_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -195,7 +195,7 @@ public async Task Exists_AmlFilesystemsGet() [Ignore("Only validating compilation of examples")] public async Task GetIfExists_AmlFilesystemsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_Get.json // this example is just showing the usage of "amlFilesystems_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemResource.cs index 88f2b014b8b3..06ac17d32508 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AmlFileSystemResource.cs @@ -21,7 +21,7 @@ public partial class Sample_AmlFileSystemResource [Ignore("Only validating compilation of examples")] public async Task Get_AmlFilesystemsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_Get.json // this example is just showing the usage of "amlFilesystems_Get" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -51,7 +51,7 @@ public async Task Get_AmlFilesystemsGet() [Ignore("Only validating compilation of examples")] public async Task Delete_AmlFilesystemsDelete() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Delete.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_Delete.json // this example is just showing the usage of "amlFilesystems_Delete" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -77,7 +77,7 @@ public async Task Delete_AmlFilesystemsDelete() [Ignore("Only validating compilation of examples")] public async Task Update_AmlFilesystemsUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Update.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_Update.json // this example is just showing the usage of "amlFilesystems_Update" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -131,7 +131,7 @@ public async Task Update_AmlFilesystemsUpdate() [Ignore("Only validating compilation of examples")] public async Task Archive_AmlFilesystemsArchive() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_Archive.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_Archive.json // this example is just showing the usage of "amlFilesystems_Archive" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -161,7 +161,7 @@ public async Task Archive_AmlFilesystemsArchive() [Ignore("Only validating compilation of examples")] public async Task CancelArchive_AmlFilesystemsCancelArchive() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_CancelArchive.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_CancelArchive.json // this example is just showing the usage of "amlFilesystems_CancelArchive" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoExportJobCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoExportJobCollection.cs new file mode 100644 index 000000000000..59e2f8b35f36 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoExportJobCollection.cs @@ -0,0 +1,203 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.ResourceManager.StorageCache.Samples +{ + public partial class Sample_AutoExportJobCollection + { + [Test] + [Ignore("Only validating compilation of examples")] + public async Task CreateOrUpdate_AutoExportJobsCreateOrUpdate() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_CreateOrUpdate.json + // this example is just showing the usage of "autoExportJobs_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoExportJobResource + AutoExportJobCollection collection = amlFileSystem.GetAutoExportJobs(); + + // invoke the operation + string autoExportJobName = "job1"; + AutoExportJobData data = new AutoExportJobData(new AzureLocation("eastus")) + { + AutoExportPrefixes = { "/" }, + Tags = +{ +["Dept"] = "ContosoAds" +}, + }; + ArmOperation lro = await collection.CreateOrUpdateAsync(WaitUntil.Completed, autoExportJobName, data); + AutoExportJobResource result = lro.Value; + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoExportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Get_AutoExportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_Get.json + // this example is just showing the usage of "autoExportJobs_Get" operation, for the dependent 
resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoExportJobResource + AutoExportJobCollection collection = amlFileSystem.GetAutoExportJobs(); + + // invoke the operation + string autoExportJobName = "job1"; + AutoExportJobResource result = await collection.GetAsync(autoExportJobName); + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoExportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task GetAll_AutoExportJobsListByAmlFilesystem() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_ListByAmlFilesystem.json + // this example is just showing the usage of "autoExportJobs_ListByAmlFileSystem" operation, for the dependent resources, they will have to be created separately. 
+ + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoExportJobResource + AutoExportJobCollection collection = amlFileSystem.GetAutoExportJobs(); + + // invoke the operation and iterate over the result + await foreach (AutoExportJobResource item in collection.GetAllAsync()) + { + // the variable item is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoExportJobData resourceData = item.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + Console.WriteLine("Succeeded"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Exists_AutoExportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_Get.json + // this example is just showing the usage of "autoExportJobs_Get" operation, for the dependent resources, they will have to be created separately. 
+ + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoExportJobResource + AutoExportJobCollection collection = amlFileSystem.GetAutoExportJobs(); + + // invoke the operation + string autoExportJobName = "job1"; + bool result = await collection.ExistsAsync(autoExportJobName); + + Console.WriteLine($"Succeeded: {result}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task GetIfExists_AutoExportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_Get.json + // this example is just showing the usage of "autoExportJobs_Get" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoExportJobResource + AutoExportJobCollection collection = amlFileSystem.GetAutoExportJobs(); + + // invoke the operation + string autoExportJobName = "job1"; + NullableResponse response = await collection.GetIfExistsAsync(autoExportJobName); + AutoExportJobResource result = response.HasValue ? 
response.Value : null; + + if (result == null) + { + Console.WriteLine("Succeeded with null as result"); + } + else + { + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoExportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoExportJobResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoExportJobResource.cs new file mode 100644 index 000000000000..aa37c0f09f47 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoExportJobResource.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using Azure.ResourceManager.StorageCache.Models; +using NUnit.Framework; + +namespace Azure.ResourceManager.StorageCache.Samples +{ + public partial class Sample_AutoExportJobResource + { + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Get_AutoExportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_Get.json + // this example is just showing the usage of "autoExportJobs_Get" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AutoExportJobResource created on azure + // for more information of creating AutoExportJobResource, please refer to the document of AutoExportJobResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + string autoExportJobName = "job1"; + ResourceIdentifier autoExportJobResourceId = AutoExportJobResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName); + AutoExportJobResource autoExportJob = client.GetAutoExportJobResource(autoExportJobResourceId); + + // invoke the operation + AutoExportJobResource result = await autoExportJob.GetAsync(); + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoExportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Delete_AutoExportJobsDelete() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_Delete.json + // this example is just showing the usage of "autoExportJobs_Delete" operation, for the dependent resources, they will have to be created separately. 
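+            // The Update sample later in this file only patches Tags. The API surface added in this change also
+            // includes an AdminStatus property on AutoExportJobPatch (of type AutoExportJobAdminStatus, with
+            // Enable and Disable values), so an administrative disable would presumably be requested with a patch
+            // along the lines of the following sketch (a sketch only, not a generated example):
+            //
+            //     AutoExportJobPatch patch = new AutoExportJobPatch
+            //     {
+            //         AdminStatus = AutoExportJobAdminStatus.Disable,
+            //     };
+            //     ArmOperation<AutoExportJobResource> lro = await autoExportJob.UpdateAsync(WaitUntil.Completed, patch);
+            //     Console.WriteLine($"Requested admin status change on: {lro.Value.Data.Id}");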
+ + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AutoExportJobResource created on azure + // for more information of creating AutoExportJobResource, please refer to the document of AutoExportJobResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + string autoExportJobName = "job1"; + ResourceIdentifier autoExportJobResourceId = AutoExportJobResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName); + AutoExportJobResource autoExportJob = client.GetAutoExportJobResource(autoExportJobResourceId); + + // invoke the operation + await autoExportJob.DeleteAsync(WaitUntil.Completed); + + Console.WriteLine("Succeeded"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Update_AutoExportJobsUpdate() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoExportJobs_Update.json + // this example is just showing the usage of "autoExportJobs_Update" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AutoExportJobResource created on azure + // for more information of creating AutoExportJobResource, please refer to the document of AutoExportJobResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + string autoExportJobName = "job1"; + ResourceIdentifier autoExportJobResourceId = AutoExportJobResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName); + AutoExportJobResource autoExportJob = client.GetAutoExportJobResource(autoExportJobResourceId); + + // invoke the operation + AutoExportJobPatch patch = new AutoExportJobPatch + { + Tags = +{ +["Dept"] = "ContosoAds" +}, + }; + ArmOperation lro = await autoExportJob.UpdateAsync(WaitUntil.Completed, patch); + AutoExportJobResource result = lro.Value; + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoExportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoImportJobCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoImportJobCollection.cs new file mode 100644 index 000000000000..11a7e4bad335 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoImportJobCollection.cs @@ -0,0 +1,208 @@ +// Copyright (c) Microsoft 
Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using Azure.ResourceManager.StorageCache.Models; +using NUnit.Framework; + +namespace Azure.ResourceManager.StorageCache.Samples +{ + public partial class Sample_AutoImportJobCollection + { + [Test] + [Ignore("Only validating compilation of examples")] + public async Task CreateOrUpdate_AutoImportJobsCreateOrUpdate() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_CreateOrUpdate.json + // this example is just showing the usage of "autoImportJobs_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoImportJobResource + AutoImportJobCollection collection = amlFileSystem.GetAutoImportJobs(); + + // invoke the operation + string autoImportJobName = "autojob1"; + AutoImportJobData data = new AutoImportJobData(new AzureLocation("eastus")) + { + AdminStatus = AutoImportJobPropertiesAdminStatus.Enable, + AutoImportPrefixes = { "/" }, + ConflictResolutionMode = ConflictResolutionMode.Skip, + EnableDeletions = false, + MaximumErrors = 0L, + Tags = +{ +["Dept"] = "ContosoAds" +}, + }; + ArmOperation lro = await collection.CreateOrUpdateAsync(WaitUntil.Completed, autoImportJobName, data); + AutoImportJobResource result = lro.Value; + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoImportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Get_AutoImportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_Get.json + // this example is just showing the usage of "autoImportJobs_Get" operation, for the dependent resources, they will have to be created separately. 
+ + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoImportJobResource + AutoImportJobCollection collection = amlFileSystem.GetAutoImportJobs(); + + // invoke the operation + string autoImportJobName = "autojob1"; + AutoImportJobResource result = await collection.GetAsync(autoImportJobName); + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoImportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task GetAll_AutoImportJobsListByAmlFilesystem() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_ListByAmlFilesystem.json + // this example is just showing the usage of "autoImportJobs_ListByAmlFileSystem" operation, for the dependent resources, they will have to be created separately. 
+ + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoImportJobResource + AutoImportJobCollection collection = amlFileSystem.GetAutoImportJobs(); + + // invoke the operation and iterate over the result + await foreach (AutoImportJobResource item in collection.GetAllAsync()) + { + // the variable item is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoImportJobData resourceData = item.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + Console.WriteLine("Succeeded"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Exists_AutoImportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_Get.json + // this example is just showing the usage of "autoImportJobs_Get" operation, for the dependent resources, they will have to be created separately. 
+ + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoImportJobResource + AutoImportJobCollection collection = amlFileSystem.GetAutoImportJobs(); + + // invoke the operation + string autoImportJobName = "autojob1"; + bool result = await collection.ExistsAsync(autoImportJobName); + + Console.WriteLine($"Succeeded: {result}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task GetIfExists_AutoImportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_Get.json + // this example is just showing the usage of "autoImportJobs_Get" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AmlFileSystemResource created on azure + // for more information of creating AmlFileSystemResource, please refer to the document of AmlFileSystemResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + ResourceIdentifier amlFileSystemResourceId = AmlFileSystemResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName); + AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(amlFileSystemResourceId); + + // get the collection of this AutoImportJobResource + AutoImportJobCollection collection = amlFileSystem.GetAutoImportJobs(); + + // invoke the operation + string autoImportJobName = "autojob1"; + NullableResponse response = await collection.GetIfExistsAsync(autoImportJobName); + AutoImportJobResource result = response.HasValue ? 
response.Value : null; + + if (result == null) + { + Console.WriteLine("Succeeded with null as result"); + } + else + { + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoImportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoImportJobResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoImportJobResource.cs new file mode 100644 index 000000000000..ccf0ddf18843 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_AutoImportJobResource.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using Azure.ResourceManager.StorageCache.Models; +using NUnit.Framework; + +namespace Azure.ResourceManager.StorageCache.Samples +{ + public partial class Sample_AutoImportJobResource + { + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Get_AutoImportJobsGet() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_Get.json + // this example is just showing the usage of "autoImportJobs_Get" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AutoImportJobResource created on azure + // for more information of creating AutoImportJobResource, please refer to the document of AutoImportJobResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + string autoImportJobName = "autojob1"; + ResourceIdentifier autoImportJobResourceId = AutoImportJobResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName); + AutoImportJobResource autoImportJob = client.GetAutoImportJobResource(autoImportJobResourceId); + + // invoke the operation + AutoImportJobResource result = await autoImportJob.GetAsync(); + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoImportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Delete_AutoImportJobsDelete() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_Delete.json + // this example is just showing the usage of "autoImportJobs_Delete" operation, for the dependent resources, they will have to be created separately. 
+ + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AutoImportJobResource created on azure + // for more information of creating AutoImportJobResource, please refer to the document of AutoImportJobResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + string autoImportJobName = "autojob1"; + ResourceIdentifier autoImportJobResourceId = AutoImportJobResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName); + AutoImportJobResource autoImportJob = client.GetAutoImportJobResource(autoImportJobResourceId); + + // invoke the operation + await autoImportJob.DeleteAsync(WaitUntil.Completed); + + Console.WriteLine("Succeeded"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Update_AutoImportJobsUpdate() + { + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/autoImportJobs_Update.json + // this example is just showing the usage of "autoImportJobs_Update" operation, for the dependent resources, they will have to be created separately. + + // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line + TokenCredential cred = new DefaultAzureCredential(); + // authenticate your client + ArmClient client = new ArmClient(cred); + + // this example assumes you already have this AutoImportJobResource created on azure + // for more information of creating AutoImportJobResource, please refer to the document of AutoImportJobResource + string subscriptionId = "00000000-0000-0000-0000-000000000000"; + string resourceGroupName = "scgroup"; + string amlFileSystemName = "fs1"; + string autoImportJobName = "autojob1"; + ResourceIdentifier autoImportJobResourceId = AutoImportJobResource.CreateResourceIdentifier(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName); + AutoImportJobResource autoImportJob = client.GetAutoImportJobResource(autoImportJobResourceId); + + // invoke the operation + AutoImportJobPatch patch = new AutoImportJobPatch + { + AdminStatus = AutoImportJobUpdatePropertiesAdminStatus.Disable, + }; + ArmOperation lro = await autoImportJob.UpdateAsync(WaitUntil.Completed, patch); + AutoImportJobResource result = lro.Value; + + // the variable result is a resource, you could call other operations on this instance as well + // but just for demo, we get its data from this resource instance + AutoImportJobData resourceData = result.Data; + // for demo we just print out the id + Console.WriteLine($"Succeeded on id: {resourceData.Id}"); + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheCollection.cs index 4e5dc42687f2..0eb115336296 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheCollection.cs +++ 
b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheCollection.cs @@ -24,7 +24,7 @@ public partial class Sample_StorageCacheCollection [Ignore("Only validating compilation of examples")] public async Task CreateOrUpdate_CachesCreateOrUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_CreateOrUpdate.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_CreateOrUpdate.json // this example is just showing the usage of "Caches_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -119,7 +119,7 @@ public async Task CreateOrUpdate_CachesCreateOrUpdate() [Ignore("Only validating compilation of examples")] public async Task CreateOrUpdate_CachesCreateOrUpdateLdapOnly() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_CreateOrUpdate_ldap_only.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_CreateOrUpdate_ldap_only.json // this example is just showing the usage of "Caches_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -199,7 +199,7 @@ public async Task CreateOrUpdate_CachesCreateOrUpdateLdapOnly() [Ignore("Only validating compilation of examples")] public async Task Get_CachesGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Get.json // this example is just showing the usage of "Caches_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -232,7 +232,7 @@ public async Task Get_CachesGet() [Ignore("Only validating compilation of examples")] public async Task GetAll_CachesListByResourceGroup() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_ListByResourceGroup.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_ListByResourceGroup.json // this example is just showing the usage of "Caches_ListByResourceGroup" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -267,7 +267,7 @@ public async Task GetAll_CachesListByResourceGroup() [Ignore("Only validating compilation of examples")] public async Task Exists_CachesGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Get.json // this example is just showing the usage of "Caches_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -296,7 +296,7 @@ public async Task Exists_CachesGet() [Ignore("Only validating compilation of examples")] public async Task GetIfExists_CachesGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Get.json // this example is just showing the usage of "Caches_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobCollection.cs index c58215a6fd80..47c605fb6157 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobCollection.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobCollection.cs @@ -20,7 +20,7 @@ public partial class Sample_StorageCacheImportJobCollection [Ignore("Only validating compilation of examples")] public async Task CreateOrUpdate_ImportJobsCreateOrUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_CreateOrUpdate.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJobs_CreateOrUpdate.json // this example is just showing the usage of "importJobs_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -65,7 +65,7 @@ public async Task CreateOrUpdate_ImportJobsCreateOrUpdate() [Ignore("Only validating compilation of examples")] public async Task Get_ImportJobsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJobs_Get.json // this example is just showing the usage of "importJobs_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -99,7 +99,7 @@ public async Task Get_ImportJobsGet() [Ignore("Only validating compilation of examples")] public async Task GetAll_ImportJobsListByAmlFilesystem() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_ListByAmlFilesystem.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJobs_ListByAmlFilesystem.json // this example is just showing the usage of "importJobs_ListByAmlFileSystem" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -135,7 +135,7 @@ public async Task GetAll_ImportJobsListByAmlFilesystem() [Ignore("Only validating compilation of examples")] public async Task Exists_ImportJobsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJobs_Get.json // this example is just showing the usage of "importJobs_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -165,7 +165,7 @@ public async Task Exists_ImportJobsGet() [Ignore("Only validating compilation of examples")] public async Task GetIfExists_ImportJobsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJobs_Get.json // this example is just showing the usage of "importJobs_Get" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobResource.cs index 366e07bf9e98..f3f8ed02a7c2 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheImportJobResource.cs @@ -20,7 +20,7 @@ public partial class Sample_StorageCacheImportJobResource [Ignore("Only validating compilation of examples")] public async Task Get_ImportJobsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJobs_Get.json // this example is just showing the usage of "importJobs_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -51,7 +51,7 @@ public async Task Get_ImportJobsGet() [Ignore("Only validating compilation of examples")] public async Task Delete_ImportJobsDelete() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJobs_Delete.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJobs_Delete.json // this example is just showing the usage of "importJobs_Delete" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -78,7 +78,7 @@ public async Task Delete_ImportJobsDelete() [Ignore("Only validating compilation of examples")] public async Task Update_ImportJobsUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/importJob_Update.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/importJob_Update.json // this example is just showing the usage of "importJobs_Update" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheResource.cs index 2dc65815f24b..f5916a49770f 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageCacheResource.cs @@ -22,7 +22,7 @@ public partial class Sample_StorageCacheResource [Ignore("Only validating compilation of examples")] public async Task Get_CachesGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Get.json // this example is just showing the usage of "Caches_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -52,7 +52,7 @@ public async Task Get_CachesGet() [Ignore("Only validating compilation of examples")] public async Task Delete_CachesDelete() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Delete.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Delete.json // this example is just showing the usage of "Caches_Delete" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -78,7 +78,7 @@ public async Task Delete_CachesDelete() [Ignore("Only validating compilation of examples")] public async Task Update_CachesUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Update.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Update.json // this example is just showing the usage of "Caches_Update" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -176,7 +176,7 @@ public async Task Update_CachesUpdate() [Ignore("Only validating compilation of examples")] public async Task Update_CachesUpdateLdapOnly() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Update_ldap_only.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Update_ldap_only.json // this example is just showing the usage of "Caches_Update" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -277,7 +277,7 @@ public async Task Update_CachesUpdateLdapOnly() [Ignore("Only validating compilation of examples")] public async Task EnableDebugInfo_CachesDebugInfo() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_DebugInfo.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_DebugInfo.json // this example is just showing the usage of "Caches_DebugInfo" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -303,7 +303,7 @@ public async Task EnableDebugInfo_CachesDebugInfo() [Ignore("Only validating compilation of examples")] public async Task Flush_CachesFlush() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Flush.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Flush.json // this example is just showing the usage of "Caches_Flush" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -329,7 +329,7 @@ public async Task Flush_CachesFlush() [Ignore("Only validating compilation of examples")] public async Task Start_CachesStart() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Start.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Start.json // this example is just showing the usage of "Caches_Start" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -355,7 +355,7 @@ public async Task Start_CachesStart() [Ignore("Only validating compilation of examples")] public async Task Stop_CachesStop() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_Stop.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_Stop.json // this example is just showing the usage of "Caches_Stop" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -381,7 +381,7 @@ public async Task Stop_CachesStop() [Ignore("Only validating compilation of examples")] public async Task StartPrimingJob_StartPrimingJob() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StartPrimingJob.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StartPrimingJob.json // this example is just showing the usage of "Caches_StartPrimingJob" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -408,7 +408,7 @@ public async Task StartPrimingJob_StartPrimingJob() [Ignore("Only validating compilation of examples")] public async Task StopPrimingJob_StopPrimingJob() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StopPrimingJob.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StopPrimingJob.json // this example is just showing the usage of "Caches_StopPrimingJob" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -435,7 +435,7 @@ public async Task StopPrimingJob_StopPrimingJob() [Ignore("Only validating compilation of examples")] public async Task PausePrimingJob_PausePrimingJob() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/PausePrimingJob.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/PausePrimingJob.json // this example is just showing the usage of "Caches_PausePrimingJob" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -462,7 +462,7 @@ public async Task PausePrimingJob_PausePrimingJob() [Ignore("Only validating compilation of examples")] public async Task ResumePrimingJob_ResumePrimingJob() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/ResumePrimingJob.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/ResumePrimingJob.json // this example is just showing the usage of "Caches_ResumePrimingJob" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -489,7 +489,7 @@ public async Task ResumePrimingJob_ResumePrimingJob() [Ignore("Only validating compilation of examples")] public async Task UpgradeFirmware_CachesUpgradeFirmware() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_UpgradeFirmware.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_UpgradeFirmware.json // this example is just showing the usage of "Caches_UpgradeFirmware" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -515,7 +515,7 @@ public async Task UpgradeFirmware_CachesUpgradeFirmware() [Ignore("Only validating compilation of examples")] public async Task UpdateSpaceAllocation_SpaceAllocationPost() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/SpaceAllocation_Post.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/SpaceAllocation_Post.json // this example is just showing the usage of "Caches_SpaceAllocation" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetCollection.cs index 53221a32d00b..331e1bd552de 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetCollection.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetCollection.cs @@ -20,7 +20,7 @@ public partial class Sample_StorageTargetCollection [Ignore("Only validating compilation of examples")] public async Task CreateOrUpdate_StorageTargetsCreateOrUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_CreateOrUpdate.json // this example is just showing the usage of "StorageTargets_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -78,7 +78,7 @@ public async Task CreateOrUpdate_StorageTargetsCreateOrUpdate() [Ignore("Only validating compilation of examples")] public async Task CreateOrUpdate_StorageTargetsCreateOrUpdateBlobNfs() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate_BlobNfs.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_CreateOrUpdate_BlobNfs.json // this example is just showing the usage of "StorageTargets_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -128,7 +128,7 @@ public async Task CreateOrUpdate_StorageTargetsCreateOrUpdateBlobNfs() [Ignore("Only validating compilation of examples")] public async Task CreateOrUpdate_StorageTargetsCreateOrUpdateNoJunctions() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate_NoJunctions.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_CreateOrUpdate_NoJunctions.json // this example is just showing the usage of "StorageTargets_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -173,7 +173,7 @@ public async Task CreateOrUpdate_StorageTargetsCreateOrUpdateNoJunctions() [Ignore("Only validating compilation of examples")] public async Task Get_StorageTargetsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Get.json // this example is just showing the usage of "StorageTargets_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -207,7 +207,7 @@ public async Task Get_StorageTargetsGet() [Ignore("Only validating compilation of examples")] public async Task GetAll_StorageTargetsList() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_ListByCache.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_ListByCache.json // this example is just showing the usage of "StorageTargets_ListByCache" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -243,7 +243,7 @@ public async Task GetAll_StorageTargetsList() [Ignore("Only validating compilation of examples")] public async Task Exists_StorageTargetsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Get.json // this example is just showing the usage of "StorageTargets_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -273,7 +273,7 @@ public async Task Exists_StorageTargetsGet() [Ignore("Only validating compilation of examples")] public async Task GetIfExists_StorageTargetsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Get.json // this example is just showing the usage of "StorageTargets_Get" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetResource.cs index 70bc3c0b5f03..b431a40bc338 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_StorageTargetResource.cs @@ -20,7 +20,7 @@ public partial class Sample_StorageTargetResource [Ignore("Only validating compilation of examples")] public async Task Get_StorageTargetsGet() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Get.json // this example is just showing the usage of "StorageTargets_Get" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -51,7 +51,7 @@ public async Task Get_StorageTargetsGet() [Ignore("Only validating compilation of examples")] public async Task Delete_StorageTargetsDelete() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Delete.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Delete.json // this example is just showing the usage of "StorageTargets_Delete" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -78,7 +78,7 @@ public async Task Delete_StorageTargetsDelete() [Ignore("Only validating compilation of examples")] public async Task Update_StorageTargetsCreateOrUpdate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_CreateOrUpdate.json // this example is just showing the usage of "StorageTargets_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -133,7 +133,7 @@ public async Task Update_StorageTargetsCreateOrUpdate() [Ignore("Only validating compilation of examples")] public async Task Update_StorageTargetsCreateOrUpdateBlobNfs() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate_BlobNfs.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_CreateOrUpdate_BlobNfs.json // this example is just showing the usage of "StorageTargets_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -180,7 +180,7 @@ public async Task Update_StorageTargetsCreateOrUpdateBlobNfs() [Ignore("Only validating compilation of examples")] public async Task Update_StorageTargetsCreateOrUpdateNoJunctions() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_CreateOrUpdate_NoJunctions.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_CreateOrUpdate_NoJunctions.json // this example is just showing the usage of "StorageTargets_CreateOrUpdate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -222,7 +222,7 @@ public async Task Update_StorageTargetsCreateOrUpdateNoJunctions() [Ignore("Only validating compilation of examples")] public async Task RefreshDns_CachesDnsRefresh() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_DnsRefresh.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_DnsRefresh.json // this example is just showing the usage of "StorageTargets_DnsRefresh" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -249,7 +249,7 @@ public async Task RefreshDns_CachesDnsRefresh() [Ignore("Only validating compilation of examples")] public async Task Flush_StorageTargetsFlush() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Flush.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Flush.json // this example is just showing the usage of "StorageTargets_Flush" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -276,7 +276,7 @@ public async Task Flush_StorageTargetsFlush() [Ignore("Only validating compilation of examples")] public async Task Suspend_StorageTargetsSuspend() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Suspend.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Suspend.json // this example is just showing the usage of "StorageTargets_Suspend" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -303,7 +303,7 @@ public async Task Suspend_StorageTargetsSuspend() [Ignore("Only validating compilation of examples")] public async Task Resume_StorageTargetsResume() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Resume.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Resume.json // this example is just showing the usage of "StorageTargets_Resume" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -330,7 +330,7 @@ public async Task Resume_StorageTargetsResume() [Ignore("Only validating compilation of examples")] public async Task Invalidate_StorageTargetsInvalidate() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_Invalidate.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_Invalidate.json // this example is just showing the usage of "StorageTargets_Invalidate" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -357,7 +357,7 @@ public async Task Invalidate_StorageTargetsInvalidate() [Ignore("Only validating compilation of examples")] public async Task RestoreDefaults_StorageTargetsRestoreDefaults() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/StorageTargets_RestoreDefaults.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/StorageTargets_RestoreDefaults.json // this example is just showing the usage of "StorageTargets_RestoreDefaults" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_SubscriptionResourceExtensions.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_SubscriptionResourceExtensions.cs index f5e205cdf668..d53ec5a92f38 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_SubscriptionResourceExtensions.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/samples/Generated/Samples/Sample_SubscriptionResourceExtensions.cs @@ -21,7 +21,7 @@ public partial class Sample_SubscriptionResourceExtensions [Ignore("Only validating compilation of examples")] public async Task GetAmlFileSystems_AmlFilesystemsList() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/amlFilesystems_List.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/amlFilesystems_List.json // this example is just showing the usage of "amlFilesystems_List" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -52,7 +52,7 @@ public async Task GetAmlFileSystems_AmlFilesystemsList() [Ignore("Only validating compilation of examples")] public async Task CheckAmlFSSubnets_CheckAmlFSSubnets() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/checkAmlFSSubnets.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/checkAmlFSSubnets.json // this example is just showing the usage of "CheckAmlFSSubnets" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -82,7 +82,7 @@ public async Task CheckAmlFSSubnets_CheckAmlFSSubnets() [Ignore("Only validating compilation of examples")] public async Task GetRequiredAmlFSSubnetsSize_GetRequiredAmlFilesystemSubnetsSize() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/getRequiredAmlFSSubnetsSize.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/getRequiredAmlFSSubnetsSize.json // this example is just showing the usage of "GetRequiredAmlFSSubnetsSize" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -106,7 +106,7 @@ public async Task GetRequiredAmlFSSubnetsSize_GetRequiredAmlFilesystemSubnetsSiz [Ignore("Only validating compilation of examples")] public async Task GetStorageCacheSkus_SkusList() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Skus_List.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Skus_List.json // this example is just showing the usage of "Skus_List" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -133,7 +133,7 @@ public async Task GetStorageCacheSkus_SkusList() [Ignore("Only validating compilation of examples")] public async Task GetUsageModels_UsageModelsList() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/UsageModels_List.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/UsageModels_List.json // this example is just showing the usage of "UsageModels_List" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -160,7 +160,7 @@ public async Task GetUsageModels_UsageModelsList() [Ignore("Only validating compilation of examples")] public async Task GetStorageCacheUsages_AscUsagesList() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/AscResourceUsages_Get.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/AscResourceUsages_Get.json // this example is just showing the usage of "AscUsages_List" operation, for the dependent resources, they will have to be created separately. // get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line @@ -188,7 +188,7 @@ public async Task GetStorageCacheUsages_AscUsagesList() [Ignore("Only validating compilation of examples")] public async Task GetStorageCaches_CachesList() { - // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2024-03-01/examples/Caches_List.json + // Generated from example definition: specification/storagecache/resource-manager/Microsoft.StorageCache/stable/2025-07-01/examples/Caches_List.json // this example is just showing the usage of "Caches_List" operation, for the dependent resources, they will have to be created separately. 
// get your azure access token, for more details of how Azure SDK get your access token, please refer to https://learn.microsoft.com/en-us/dotnet/azure/sdk/authentication?tabs=command-line diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemCollection.cs index ab7e83f9fa5e..f566d7672fe7 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemCollection.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemCollection.cs @@ -65,7 +65,7 @@ internal static void ValidateResourceId(ResourceIdentifier id) /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -114,7 +114,7 @@ public virtual async Task> CreateOrUpdateAsy /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -163,7 +163,7 @@ public virtual ArmOperation CreateOrUpdate(WaitUntil wait /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -208,7 +208,7 @@ public virtual async Task> GetAsync(string amlFi /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -253,7 +253,7 @@ public virtual Response Get(string amlFileSystemName, Can /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -283,7 +283,7 @@ public virtual AsyncPageable GetAllAsync(CancellationToke /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -313,7 +313,7 @@ public virtual Pageable GetAll(CancellationToken cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -356,7 +356,7 @@ public virtual async Task> ExistsAsync(string amlFileSystemName, /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -399,7 +399,7 @@ public virtual Response Exists(string amlFileSystemName, CancellationToken /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -444,7 +444,7 @@ public virtual async Task> GetIfExistsAs /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemResource.cs index 884817bc17a0..7bf46944754c 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AmlFileSystemResource.cs @@ -90,6 +90,75 @@ internal static void ValidateResourceId(ResourceIdentifier id) throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Invalid resource type {0} expected {1}", id.ResourceType, ResourceType), nameof(id)); } + /// Gets a collection of AutoExportJobResources in the AmlFileSystem. + /// An object representing collection of AutoExportJobResources and their operations over a AutoExportJobResource. + public virtual AutoExportJobCollection GetAutoExportJobs() + { + return GetCachedClient(client => new AutoExportJobCollection(client, Id)); + } + + /// + /// Returns an auto export job. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + [ForwardsClientCalls] + public virtual async Task> GetAutoExportJobAsync(string autoExportJobName, CancellationToken cancellationToken = default) + { + return await GetAutoExportJobs().GetAsync(autoExportJobName, cancellationToken).ConfigureAwait(false); + } + + /// + /// Returns an auto export job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + [ForwardsClientCalls] + public virtual Response GetAutoExportJob(string autoExportJobName, CancellationToken cancellationToken = default) + { + return GetAutoExportJobs().Get(autoExportJobName, cancellationToken); + } + /// Gets a collection of StorageCacheImportJobResources in the AmlFileSystem. /// An object representing collection of StorageCacheImportJobResources and their operations over a StorageCacheImportJobResource. public virtual StorageCacheImportJobCollection GetStorageCacheImportJobs() @@ -110,7 +179,7 @@ public virtual StorageCacheImportJobCollection GetStorageCacheImportJobs() /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -141,7 +210,7 @@ public virtual async Task> GetStorageCac /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -159,6 +228,75 @@ public virtual Response GetStorageCacheImportJob( return GetStorageCacheImportJobs().Get(importJobName, cancellationToken); } + /// Gets a collection of AutoImportJobResources in the AmlFileSystem. + /// An object representing collection of AutoImportJobResources and their operations over a AutoImportJobResource. + public virtual AutoImportJobCollection GetAutoImportJobs() + { + return GetCachedClient(client => new AutoImportJobCollection(client, Id)); + } + + /// + /// Returns an auto import job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ [ForwardsClientCalls] + public virtual async Task> GetAutoImportJobAsync(string autoImportJobName, CancellationToken cancellationToken = default) + { + return await GetAutoImportJobs().GetAsync(autoImportJobName, cancellationToken).ConfigureAwait(false); + } + + /// + /// Returns an auto import job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + [ForwardsClientCalls] + public virtual Response GetAutoImportJob(string autoImportJobName, CancellationToken cancellationToken = default) + { + return GetAutoImportJobs().Get(autoImportJobName, cancellationToken); + } + /// /// Returns an AML file system. /// @@ -172,7 +310,7 @@ public virtual Response GetStorageCacheImportJob( /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -212,7 +350,7 @@ public virtual async Task> GetAsync(Cancellation /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -252,7 +390,7 @@ public virtual Response Get(CancellationToken cancellatio /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -294,7 +432,7 @@ public virtual async Task DeleteAsync(WaitUntil waitUntil, Cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -336,7 +474,7 @@ public virtual ArmOperation Delete(WaitUntil waitUntil, CancellationToken cancel /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -382,7 +520,7 @@ public virtual async Task> UpdateAsync(WaitU /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -428,7 +566,7 @@ public virtual ArmOperation Update(WaitUntil waitUntil, A /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -467,7 +605,7 @@ public virtual async Task ArchiveAsync(AmlFileSystemArchiveContent con /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -506,7 +644,7 @@ public virtual Response Archive(AmlFileSystemArchiveContent content = null, Canc /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -544,7 +682,7 @@ public virtual async Task CancelArchiveAsync(CancellationToken cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -582,7 +720,7 @@ public virtual Response CancelArchive(CancellationToken cancellationToken = defa /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -644,7 +782,7 @@ public virtual async Task> AddTagAsync(string ke /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -706,7 +844,7 @@ public virtual Response AddTag(string key, string value, /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -763,7 +901,7 @@ public virtual async Task> SetTagsAsync(IDiction /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -820,7 +958,7 @@ public 
virtual Response SetTags(IDictionary /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -880,7 +1018,7 @@ public virtual async Task> RemoveTagAsync(string /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/ArmStorageCacheModelFactory.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/ArmStorageCacheModelFactory.cs index c99dc29f12c3..97c86bf1ee78 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/ArmStorageCacheModelFactory.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/ArmStorageCacheModelFactory.cs @@ -156,6 +156,65 @@ public static AmlFileSystemRootSquashSettings AmlFileSystemRootSquashSettings(Am serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// The id. + /// The name. + /// The resourceType. + /// The systemData. + /// The tags. + /// The location. + /// ARM provisioning state. + /// The administrative status of the auto export job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto export job. By default it is set to 'Enable'. + /// An array of blob paths/prefixes that get auto exported to the cluster namespace. It has '/' as the default value. Number of maximum allowed paths for now is 1. + /// The operational state of auto export. InProgress indicates the export is running. Disabling indicates the user has requested to disable the export but the disabling is still in progress. Disabled indicates auto export has been disabled. DisableFailed indicates the disabling has failed. Failed means the export was unable to continue, due to a fatal error. + /// Server-defined status code for auto export job. + /// Server-defined status message for auto export job. + /// Total files exported since the start of the export. This is accumulative, some files may be counted repeatedly. + /// Total data (in MiB) exported since the start of the export. This is accumulative, some files may be counted repeatedly. + /// Total files failed to be export since the last successfully completed iteration. This is accumulative, some files may be counted repeatedly. + /// Number of iterations completed since the start of the export. + /// Time (in UTC) of the last successfully completed export iteration. Look at logging container for details. + /// Files discovered for export in current iteration. It may increase while more export items are found. + /// Data (in MiB) discovered for export in current iteration. It may increase while more export items are found. + /// Files that have been exported in current iteration. + /// Data (in MiB) that have been exported in current iteration. + /// Files failed to export in current iteration. + /// The time (in UTC) the latest auto export job started. + /// The time (in UTC) of the last completed auto export job. + /// A new instance for mocking. + public static AutoExportJobData AutoExportJobData(ResourceIdentifier id = null, string name = null, ResourceType resourceType = default, SystemData systemData = null, IDictionary tags = null, AzureLocation location = default, AutoExportJobProvisioningStateType? provisioningState = null, AutoExportJobAdminStatus? adminStatus = null, IEnumerable autoExportPrefixes = null, AutoExportStatusType? state = null, string statusCode = null, string statusMessage = null, long? totalFilesExported = null, long? 
totalMiBExported = null, long? totalFilesFailed = null, int? exportIterationCount = null, DateTimeOffset? lastSuccessfulIterationCompletionTimeUTC = null, long? currentIterationFilesDiscovered = null, long? currentIterationMiBDiscovered = null, long? currentIterationFilesExported = null, long? currentIterationMiBExported = null, long? currentIterationFilesFailed = null, DateTimeOffset? lastStartedTimeUTC = null, DateTimeOffset? lastCompletionTimeUTC = null) + { + tags ??= new Dictionary(); + autoExportPrefixes ??= new List(); + + return new AutoExportJobData( + id, + name, + resourceType, + systemData, + tags, + location, + provisioningState, + adminStatus, + autoExportPrefixes?.ToList(), + state, + statusCode, + statusMessage, + totalFilesExported, + totalMiBExported, + totalFilesFailed, + exportIterationCount, + lastSuccessfulIterationCompletionTimeUTC, + currentIterationFilesDiscovered, + currentIterationMiBDiscovered, + currentIterationFilesExported, + currentIterationMiBExported, + currentIterationFilesFailed, + lastStartedTimeUTC, + lastCompletionTimeUTC, + serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// The id. /// The name. @@ -164,21 +223,28 @@ public static AmlFileSystemRootSquashSettings AmlFileSystemRootSquashSettings(Am /// The tags. /// The location. /// ARM provisioning state. + /// The administrative status of the import job. Possible values: 'Active', 'Cancel'. Passing in a value of 'Cancel' will cancel the current active import job. By default it is set to 'Active'. /// An array of blob paths/prefixes that get imported into the cluster namespace. It has '/' as the default value. /// How the import job will handle conflicts. For example, if the import job is trying to bring in a directory, but a file is at that path, how it handles it. Fail indicates that the import job should stop immediately and not do anything with the conflict. Skip indicates that it should pass over the conflict. OverwriteIfDirty causes the import job to delete and re-import the file or directory if it is a conflicting type, is dirty, or was not previously imported. OverwriteAlways extends OverwriteIfDirty to include releasing files that had been restored but were not dirty. Please reference https://learn.microsoft.com/en-us/azure/azure-managed-lustre/ for a thorough explanation of these resolution modes. /// Total non-conflict oriented errors the import job will tolerate before exiting with failure. -1 means infinite. 0 means exit immediately and is the default. - /// The state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. + /// The operational state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. /// The status message of the import job. 
/// The total blob objects walked. /// A recent and frequently updated rate of blobs walked per second. /// The total blobs that have been imported since import began. + /// New or modified files that have been imported into the filesystem. + /// New or modified directories that have been imported into the filesystem. + /// Newly added symbolic links into the filesystem. + /// Files that already exist in the filesystem and have not been modified. + /// Directories that already exist in the filesystem and have not been modified. + /// Symbolic links that already exist in the filesystem and have not been modified. /// A recent and frequently updated rate of total files, directories, and symlinks imported per second. - /// The time of the last completed archive operation. - /// The time the latest archive operation started. + /// The time (in UTC) of the last completed import job. + /// The time (in UTC) the latest import job started. /// Number of errors in the import job. /// Number of conflicts in the import job. /// A new instance for mocking. - public static StorageCacheImportJobData StorageCacheImportJobData(ResourceIdentifier id = null, string name = null, ResourceType resourceType = default, SystemData systemData = null, IDictionary tags = null, AzureLocation location = default, ImportJobProvisioningStateType? provisioningState = null, IEnumerable importPrefixes = null, ConflictResolutionMode? conflictResolutionMode = null, int? maximumErrors = null, ImportStatusType? state = null, string statusMessage = null, long? totalBlobsWalked = null, long? blobsWalkedPerSecond = null, long? totalBlobsImported = null, long? blobsImportedPerSecond = null, DateTimeOffset? lastCompletionOn = null, DateTimeOffset? lastStartedOn = null, int? totalErrors = null, int? totalConflicts = null) + public static StorageCacheImportJobData StorageCacheImportJobData(ResourceIdentifier id = null, string name = null, ResourceType resourceType = default, SystemData systemData = null, IDictionary tags = null, AzureLocation location = default, ImportJobProvisioningStateType? provisioningState = null, ImportJobAdminStatus? adminStatus = null, IEnumerable importPrefixes = null, ConflictResolutionMode? conflictResolutionMode = null, int? maximumErrors = null, ImportStatusType? state = null, string statusMessage = null, long? totalBlobsWalked = null, long? blobsWalkedPerSecond = null, long? totalBlobsImported = null, long? importedFiles = null, long? importedDirectories = null, long? importedSymlinks = null, long? preexistingFiles = null, long? preexistingDirectories = null, long? preexistingSymlinks = null, long? blobsImportedPerSecond = null, DateTimeOffset? lastCompletionOn = null, DateTimeOffset? lastStartedOn = null, int? totalErrors = null, int? 
totalConflicts = null) { tags ??= new Dictionary(); importPrefixes ??= new List(); @@ -191,6 +257,7 @@ public static StorageCacheImportJobData StorageCacheImportJobData(ResourceIdenti tags, location, provisioningState, + adminStatus, importPrefixes?.ToList(), conflictResolutionMode, maximumErrors, @@ -199,6 +266,12 @@ public static StorageCacheImportJobData StorageCacheImportJobData(ResourceIdenti totalBlobsWalked, blobsWalkedPerSecond, totalBlobsImported, + importedFiles, + importedDirectories, + importedSymlinks, + preexistingFiles, + preexistingDirectories, + preexistingSymlinks, blobsImportedPerSecond, lastCompletionOn, lastStartedOn, @@ -207,6 +280,111 @@ public static StorageCacheImportJobData StorageCacheImportJobData(ResourceIdenti serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// The id. + /// The name. + /// The resourceType. + /// The systemData. + /// The tags. + /// The location. + /// ARM provisioning state. + /// The administrative status of the auto import job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto import job. By default it is set to 'Enable'. + /// An array of blob paths/prefixes that get auto imported to the cluster namespace. It has '/' as the default value. Number of maximum allowed paths is 100. + /// How the auto import job will handle conflicts. For example, if the auto import job is trying to bring in a directory, but a file is at that path, how it handles it. Fail indicates that the auto import job should stop immediately and not do anything with the conflict. Skip indicates that it should pass over the conflict. OverwriteIfDirty causes the auto import job to delete and re-import the file or directory if it is a conflicting type, is dirty, or is currently released. OverwriteAlways extends OverwriteIfDirty to include releasing files that had been restored but were not dirty. Please reference https://learn.microsoft.com/en-us/azure/azure-managed-lustre/blob-integration#conflict-resolution-mode for a thorough explanation of these resolution modes. + /// Whether or not to enable deletions during auto import. This only affects overwrite-dirty. + /// Total non-conflict-oriented errors (e.g., OS errors) Import will tolerate before exiting with failure. -1 means infinite. 0 means exit immediately on any error. + /// The state of the auto import operation. + /// Date and time of when the currently running full scan began. + /// Date and time of when the full scan ended. + /// Total number of blobs walked during full scan. + /// Rate of blobs walked during full scan. + /// Total number of blobs imported during full scan. + /// Rate of blob import during full scan. + /// Number of files imported during full scan. + /// Number of directories imported during full scan. + /// Number of symlinks imported during full scan. + /// Number of preexisting files during full scan. + /// Number of preexisting directories during full scan. + /// Number of preexisting symlinks during full scan. + /// Total errors encountered during full scan. + /// Total conflicts encountered during full scan. + /// The storage account blob change feed status of the auto import job. + /// The time (in UTC) the latest auto import job started. + /// The time (in UTC) of the last completed auto import job. + /// A new instance for mocking. 
+ public static AutoImportJobData AutoImportJobData(ResourceIdentifier id = null, string name = null, ResourceType resourceType = default, SystemData systemData = null, IDictionary tags = null, AzureLocation location = default, AutoImportJobPropertiesProvisioningState? provisioningState = null, AutoImportJobPropertiesAdminStatus? adminStatus = null, IEnumerable autoImportPrefixes = null, ConflictResolutionMode? conflictResolutionMode = null, bool? enableDeletions = null, long? maximumErrors = null, AutoImportJobState? state = null, DateTimeOffset? scanStartOn = null, DateTimeOffset? scanEndOn = null, long? totalBlobsWalked = null, long? rateOfBlobWalk = null, long? totalBlobsImported = null, long? rateOfBlobImport = null, long? importedFiles = null, long? importedDirectories = null, long? importedSymlinks = null, long? preexistingFiles = null, long? preexistingDirectories = null, long? preexistingSymlinks = null, long? totalErrors = null, long? totalConflicts = null, AutoImportJobPropertiesStatusBlobSyncEvents blobSyncEvents = null, DateTimeOffset? lastStartedTimeUTC = null, DateTimeOffset? lastCompletionTimeUTC = null) + { + tags ??= new Dictionary(); + autoImportPrefixes ??= new List(); + + return new AutoImportJobData( + id, + name, + resourceType, + systemData, + tags, + location, + provisioningState, + adminStatus, + autoImportPrefixes?.ToList(), + conflictResolutionMode, + enableDeletions, + maximumErrors, + state, + scanStartOn, + scanEndOn, + totalBlobsWalked, + rateOfBlobWalk, + totalBlobsImported, + rateOfBlobImport, + importedFiles, + importedDirectories, + importedSymlinks, + preexistingFiles, + preexistingDirectories, + preexistingSymlinks, + totalErrors, + totalConflicts, + blobSyncEvents, + lastStartedTimeUTC, + lastCompletionTimeUTC, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Number of files imported during auto import. + /// Number of directories imported during auto import. + /// Number of symlinks imported during auto import. + /// Number of preexisting files during auto import. + /// Number of preexisting directories during auto import. + /// Number of preexisting symlinks during auto import. + /// Total number of blobs imported during auto import. + /// Rate of blob import per second during auto import. + /// Total errors encountered during auto import. + /// Total conflicts encountered during auto import. + /// Number of deletions during auto import. + /// Date and time of the last Change Feed event consumed. + /// Date and time when last fully synchronized. + /// A new instance for mocking. + public static AutoImportJobPropertiesStatusBlobSyncEvents AutoImportJobPropertiesStatusBlobSyncEvents(long? importedFiles = null, long? importedDirectories = null, long? importedSymlinks = null, long? preexistingFiles = null, long? preexistingDirectories = null, long? preexistingSymlinks = null, long? totalBlobsImported = null, long? rateOfBlobImport = null, long? totalErrors = null, long? totalConflicts = null, long? deletions = null, DateTimeOffset? lastChangeFeedEventConsumedOn = null, DateTimeOffset? 
lastTimeFullySynchronized = null) + { + return new AutoImportJobPropertiesStatusBlobSyncEvents( + importedFiles, + importedDirectories, + importedSymlinks, + preexistingFiles, + preexistingDirectories, + preexistingSymlinks, + totalBlobsImported, + rateOfBlobImport, + totalErrors, + totalConflicts, + deletions, + lastChangeFeedEventConsumedOn, + lastTimeFullySynchronized, + serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// The number of available IP addresses that are required for the AML file system. /// A new instance for mocking. @@ -536,6 +714,34 @@ public static StorageTargetData StorageTargetData(ResourceIdentifier id = null, serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// The id. + /// The name. + /// The resourceType. + /// The systemData. + /// The tags. + /// The location. + /// ARM provisioning state. + /// An array of blob paths/prefixes that get imported into the cluster namespace. It has '/' as the default value. + /// How the import job will handle conflicts. For example, if the import job is trying to bring in a directory, but a file is at that path, how it handles it. Fail indicates that the import job should stop immediately and not do anything with the conflict. Skip indicates that it should pass over the conflict. OverwriteIfDirty causes the import job to delete and re-import the file or directory if it is a conflicting type, is dirty, or was not previously imported. OverwriteAlways extends OverwriteIfDirty to include releasing files that had been restored but were not dirty. Please reference https://learn.microsoft.com/en-us/azure/azure-managed-lustre/ for a thorough explanation of these resolution modes. + /// Total non-conflict oriented errors the import job will tolerate before exiting with failure. -1 means infinite. 0 means exit immediately and is the default. + /// The state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. + /// The status message of the import job. + /// The total blob objects walked. + /// A recent and frequently updated rate of blobs walked per second. + /// The total blobs that have been imported since import began. + /// A recent and frequently updated rate of total files, directories, and symlinks imported per second. + /// The time of the last completed archive operation. + /// The time the latest archive operation started. + /// Number of errors in the import job. + /// Number of conflicts in the import job. + /// A new instance for mocking. + [EditorBrowsable(EditorBrowsableState.Never)] + public static StorageCacheImportJobData StorageCacheImportJobData(ResourceIdentifier id, string name, ResourceType resourceType, SystemData systemData, IDictionary tags, AzureLocation location, ImportJobProvisioningStateType? provisioningState, IEnumerable importPrefixes, ConflictResolutionMode? conflictResolutionMode, int? maximumErrors, ImportStatusType? state, string statusMessage, long? totalBlobsWalked, long? blobsWalkedPerSecond, long? totalBlobsImported, long? blobsImportedPerSecond, DateTimeOffset? lastCompletionOn, DateTimeOffset? lastStartedOn, int? totalErrors, int? 
totalConflicts) + { + return StorageCacheImportJobData(id: id, name: name, resourceType: resourceType, systemData: systemData, tags: tags, location: location, provisioningState: provisioningState, adminStatus: default, importPrefixes: importPrefixes, conflictResolutionMode: conflictResolutionMode, maximumErrors: maximumErrors, state: state, statusMessage: statusMessage, totalBlobsWalked: totalBlobsWalked, blobsWalkedPerSecond: blobsWalkedPerSecond, totalBlobsImported: totalBlobsImported, importedFiles: default, importedDirectories: default, importedSymlinks: default, preexistingFiles: default, preexistingDirectories: default, preexistingSymlinks: default, blobsImportedPerSecond: blobsImportedPerSecond, lastCompletionOn: lastCompletionOn, lastStartedOn: lastStartedOn, totalErrors: totalErrors, totalConflicts: totalConflicts); + } + /// Initializes a new instance of . /// The id. /// The name. diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobCollection.cs new file mode 100644 index 000000000000..3bb4de8535ed --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobCollection.cs @@ -0,0 +1,493 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using Autorest.CSharp.Core; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.ResourceManager.StorageCache +{ + /// + /// A class representing a collection of and their operations. + /// Each in the collection will belong to the same instance of . + /// To get an instance call the GetAutoExportJobs method from an instance of . + /// + public partial class AutoExportJobCollection : ArmCollection, IEnumerable, IAsyncEnumerable + { + private readonly ClientDiagnostics _autoExportJobautoExportJobsClientDiagnostics; + private readonly AutoExportJobsRestOperations _autoExportJobautoExportJobsRestClient; + + /// Initializes a new instance of the class for mocking. + protected AutoExportJobCollection() + { + } + + /// Initializes a new instance of the class. + /// The client parameters to use in these operations. + /// The identifier of the parent resource that is the target of operations. + internal AutoExportJobCollection(ArmClient client, ResourceIdentifier id) : base(client, id) + { + _autoExportJobautoExportJobsClientDiagnostics = new ClientDiagnostics("Azure.ResourceManager.StorageCache", AutoExportJobResource.ResourceType.Namespace, Diagnostics); + TryGetApiVersion(AutoExportJobResource.ResourceType, out string autoExportJobautoExportJobsApiVersion); + _autoExportJobautoExportJobsRestClient = new AutoExportJobsRestOperations(Pipeline, Diagnostics.ApplicationId, Endpoint, autoExportJobautoExportJobsApiVersion); +#if DEBUG + ValidateResourceId(Id); +#endif + } + + internal static void ValidateResourceId(ResourceIdentifier id) + { + if (id.ResourceType != AmlFileSystemResource.ResourceType) + throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Invalid resource type {0} expected {1}", id.ResourceType, AmlFileSystemResource.ResourceType), nameof(id)); + } + + /// + /// Create or update an auto export job. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_CreateOrUpdate + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// or is null. + public virtual async Task> CreateOrUpdateAsync(WaitUntil waitUntil, string autoExportJobName, AutoExportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.CreateOrUpdate"); + scope.Start(); + try + { + var response = await _autoExportJobautoExportJobsRestClient.CreateOrUpdateAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, data, cancellationToken).ConfigureAwait(false); + var operation = new StorageCacheArmOperation(new AutoExportJobOperationSource(Client), _autoExportJobautoExportJobsClientDiagnostics, Pipeline, _autoExportJobautoExportJobsRestClient.CreateCreateOrUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, data).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + await operation.WaitForCompletionAsync(cancellationToken).ConfigureAwait(false); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Create or update an auto export job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_CreateOrUpdate + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// or is null. 
+ public virtual ArmOperation CreateOrUpdate(WaitUntil waitUntil, string autoExportJobName, AutoExportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.CreateOrUpdate"); + scope.Start(); + try + { + var response = _autoExportJobautoExportJobsRestClient.CreateOrUpdate(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, data, cancellationToken); + var operation = new StorageCacheArmOperation(new AutoExportJobOperationSource(Client), _autoExportJobautoExportJobsClientDiagnostics, Pipeline, _autoExportJobautoExportJobsRestClient.CreateCreateOrUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, data).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + operation.WaitForCompletion(cancellationToken); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns an auto export job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual async Task> GetAsync(string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.Get"); + scope.Start(); + try + { + var response = await _autoExportJobautoExportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, cancellationToken).ConfigureAwait(false); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoExportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns an auto export job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. 
+ public virtual Response Get(string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.Get"); + scope.Start(); + try + { + var response = _autoExportJobautoExportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, cancellationToken); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoExportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns all the auto export jobs the user has access to under an AML File System. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs + /// + /// + /// Operation Id + /// autoExportJobs_ListByAmlFileSystem + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + /// An async collection of that may take multiple service requests to iterate over. + public virtual AsyncPageable GetAllAsync(CancellationToken cancellationToken = default) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => _autoExportJobautoExportJobsRestClient.CreateListByAmlFileSystemRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => _autoExportJobautoExportJobsRestClient.CreateListByAmlFileSystemNextPageRequest(nextLink, Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, NextPageRequest, e => new AutoExportJobResource(Client, AutoExportJobData.DeserializeAutoExportJobData(e)), _autoExportJobautoExportJobsClientDiagnostics, Pipeline, "AutoExportJobCollection.GetAll", "value", "nextLink", cancellationToken); + } + + /// + /// Returns all the auto export jobs the user has access to under an AML File System. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs + /// + /// + /// Operation Id + /// autoExportJobs_ListByAmlFileSystem + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + /// A collection of that may take multiple service requests to iterate over. + public virtual Pageable GetAll(CancellationToken cancellationToken = default) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => _autoExportJobautoExportJobsRestClient.CreateListByAmlFileSystemRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => _autoExportJobautoExportJobsRestClient.CreateListByAmlFileSystemNextPageRequest(nextLink, Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, NextPageRequest, e => new AutoExportJobResource(Client, AutoExportJobData.DeserializeAutoExportJobData(e)), _autoExportJobautoExportJobsClientDiagnostics, Pipeline, "AutoExportJobCollection.GetAll", "value", "nextLink", cancellationToken); + } + + /// + /// Checks to see if the resource exists in azure. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual async Task> ExistsAsync(string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.Exists"); + scope.Start(); + try + { + var response = await _autoExportJobautoExportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(response.Value != null, response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Checks to see if the resource exists in azure. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual Response Exists(string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.Exists"); + scope.Start(); + try + { + var response = _autoExportJobautoExportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, cancellationToken: cancellationToken); + return Response.FromValue(response.Value != null, response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Tries to get details for this resource from the service. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. 
+ public virtual async Task> GetIfExistsAsync(string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.GetIfExists"); + scope.Start(); + try + { + var response = await _autoExportJobautoExportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, cancellationToken: cancellationToken).ConfigureAwait(false); + if (response.Value == null) + return new NoValueResponse(response.GetRawResponse()); + return Response.FromValue(new AutoExportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Tries to get details for this resource from the service. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual NullableResponse GetIfExists(string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobCollection.GetIfExists"); + scope.Start(); + try + { + var response = _autoExportJobautoExportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoExportJobName, cancellationToken: cancellationToken); + if (response.Value == null) + return new NoValueResponse(response.GetRawResponse()); + return Response.FromValue(new AutoExportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetAll().GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetAll().GetEnumerator(); + } + + IAsyncEnumerator IAsyncEnumerable.GetAsyncEnumerator(CancellationToken cancellationToken) + { + return GetAllAsync(cancellationToken: cancellationToken).GetAsyncEnumerator(cancellationToken); + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobData.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobData.Serialization.cs new file mode 100644 index 000000000000..0ce6cde83f63 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobData.Serialization.cs @@ -0,0 +1,482 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
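// ---------------------------------------------------------------------------------------------------
// Illustrative usage sketch, not part of the generated sources in this diff. It shows one way the new
// AutoExportJobCollection surface could be exercised from an existing AmlFileSystemResource via the
// GetAutoExportJobs accessor referenced in the collection docs above. The job name and prefix are
// placeholder values, and the AutoExportJobAdminStatus.Enable member name is assumed from the
// "Possible values: 'Enable', 'Disable'" documentation.
// ---------------------------------------------------------------------------------------------------
using System;
using System.Threading.Tasks;
using Azure;
using Azure.ResourceManager;
using Azure.ResourceManager.StorageCache;
using Azure.ResourceManager.StorageCache.Models;

internal static class AutoExportJobCollectionUsageSketch
{
    public static async Task RunAsync(AmlFileSystemResource amlFileSystem)
    {
        // Build the user-settable payload; AdminStatus and AutoExportPrefixes are the writable properties.
        var data = new AutoExportJobData(amlFileSystem.Data.Location);
        data.AutoExportPrefixes.Add("/");                   // only one prefix is currently allowed per the docs above
        data.AdminStatus = AutoExportJobAdminStatus.Enable;

        // Create (or update) the auto export job and wait for the long-running operation to complete.
        ArmOperation<AutoExportJobResource> lro = await amlFileSystem
            .GetAutoExportJobs()
            .CreateOrUpdateAsync(WaitUntil.Completed, "myAutoExportJob", data);
        AutoExportJobResource created = lro.Value;
        Console.WriteLine($"Created {created.Data.Name}, state: {created.Data.State}");

        // Enumerate all auto export jobs under this AML file system.
        await foreach (AutoExportJobResource job in amlFileSystem.GetAutoExportJobs().GetAllAsync())
        {
            Console.WriteLine($"{job.Data.Name}: {job.Data.State}");
        }
    }
}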
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.ResourceManager.Models; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + public partial class AutoExportJobData : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoExportJobData)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("properties"u8); + writer.WriteStartObject(); + if (options.Format != "W" && Optional.IsDefined(ProvisioningState)) + { + writer.WritePropertyName("provisioningState"u8); + writer.WriteStringValue(ProvisioningState.Value.ToString()); + } + if (Optional.IsDefined(AdminStatus)) + { + writer.WritePropertyName("adminStatus"u8); + writer.WriteStringValue(AdminStatus.Value.ToString()); + } + if (Optional.IsCollectionDefined(AutoExportPrefixes)) + { + writer.WritePropertyName("autoExportPrefixes"u8); + writer.WriteStartArray(); + foreach (var item in AutoExportPrefixes) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + writer.WritePropertyName("status"u8); + writer.WriteStartObject(); + if (Optional.IsDefined(State)) + { + writer.WritePropertyName("state"u8); + writer.WriteStringValue(State.Value.ToString()); + } + if (options.Format != "W" && Optional.IsDefined(StatusCode)) + { + writer.WritePropertyName("statusCode"u8); + writer.WriteStringValue(StatusCode); + } + if (options.Format != "W" && Optional.IsDefined(StatusMessage)) + { + writer.WritePropertyName("statusMessage"u8); + writer.WriteStringValue(StatusMessage); + } + if (options.Format != "W" && Optional.IsDefined(TotalFilesExported)) + { + writer.WritePropertyName("totalFilesExported"u8); + writer.WriteNumberValue(TotalFilesExported.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalMiBExported)) + { + writer.WritePropertyName("totalMiBExported"u8); + writer.WriteNumberValue(TotalMiBExported.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalFilesFailed)) + { + writer.WritePropertyName("totalFilesFailed"u8); + writer.WriteNumberValue(TotalFilesFailed.Value); + } + if (options.Format != "W" && Optional.IsDefined(ExportIterationCount)) + { + writer.WritePropertyName("exportIterationCount"u8); + writer.WriteNumberValue(ExportIterationCount.Value); + } + if (options.Format != "W" && Optional.IsDefined(LastSuccessfulIterationCompletionTimeUTC)) + { + writer.WritePropertyName("lastSuccessfulIterationCompletionTimeUTC"u8); + writer.WriteStringValue(LastSuccessfulIterationCompletionTimeUTC.Value, "O"); + } + if (options.Format != "W" && Optional.IsDefined(CurrentIterationFilesDiscovered)) + { + writer.WritePropertyName("currentIterationFilesDiscovered"u8); + 
writer.WriteNumberValue(CurrentIterationFilesDiscovered.Value); + } + if (options.Format != "W" && Optional.IsDefined(CurrentIterationMiBDiscovered)) + { + writer.WritePropertyName("currentIterationMiBDiscovered"u8); + writer.WriteNumberValue(CurrentIterationMiBDiscovered.Value); + } + if (options.Format != "W" && Optional.IsDefined(CurrentIterationFilesExported)) + { + writer.WritePropertyName("currentIterationFilesExported"u8); + writer.WriteNumberValue(CurrentIterationFilesExported.Value); + } + if (options.Format != "W" && Optional.IsDefined(CurrentIterationMiBExported)) + { + writer.WritePropertyName("currentIterationMiBExported"u8); + writer.WriteNumberValue(CurrentIterationMiBExported.Value); + } + if (options.Format != "W" && Optional.IsDefined(CurrentIterationFilesFailed)) + { + writer.WritePropertyName("currentIterationFilesFailed"u8); + writer.WriteNumberValue(CurrentIterationFilesFailed.Value); + } + if (options.Format != "W" && Optional.IsDefined(LastStartedTimeUTC)) + { + writer.WritePropertyName("lastStartedTimeUTC"u8); + writer.WriteStringValue(LastStartedTimeUTC.Value, "O"); + } + if (options.Format != "W" && Optional.IsDefined(LastCompletionTimeUTC)) + { + writer.WritePropertyName("lastCompletionTimeUTC"u8); + writer.WriteStringValue(LastCompletionTimeUTC.Value, "O"); + } + writer.WriteEndObject(); + writer.WriteEndObject(); + } + + AutoExportJobData IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoExportJobData)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutoExportJobData(document.RootElement, options); + } + + internal static AutoExportJobData DeserializeAutoExportJobData(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary tags = default; + AzureLocation location = default; + ResourceIdentifier id = default; + string name = default; + ResourceType type = default; + SystemData systemData = default; + AutoExportJobProvisioningStateType? provisioningState = default; + AutoExportJobAdminStatus? adminStatus = default; + IList autoExportPrefixes = default; + AutoExportStatusType? state = default; + string statusCode = default; + string statusMessage = default; + long? totalFilesExported = default; + long? totalMiBExported = default; + long? totalFilesFailed = default; + int? exportIterationCount = default; + DateTimeOffset? lastSuccessfulIterationCompletionTimeUTC = default; + long? currentIterationFilesDiscovered = default; + long? currentIterationMiBDiscovered = default; + long? currentIterationFilesExported = default; + long? currentIterationMiBExported = default; + long? currentIterationFilesFailed = default; + DateTimeOffset? lastStartedTimeUTC = default; + DateTimeOffset? 
lastCompletionTimeUTC = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tags"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetString()); + } + tags = dictionary; + continue; + } + if (property.NameEquals("location"u8)) + { + location = new AzureLocation(property.Value.GetString()); + continue; + } + if (property.NameEquals("id"u8)) + { + id = new ResourceIdentifier(property.Value.GetString()); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("type"u8)) + { + type = new ResourceType(property.Value.GetString()); + continue; + } + if (property.NameEquals("systemData"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + systemData = JsonSerializer.Deserialize(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("properties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("provisioningState"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + provisioningState = new AutoExportJobProvisioningStateType(property0.Value.GetString()); + continue; + } + if (property0.NameEquals("adminStatus"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + adminStatus = new AutoExportJobAdminStatus(property0.Value.GetString()); + continue; + } + if (property0.NameEquals("autoExportPrefixes"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property0.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + autoExportPrefixes = array; + continue; + } + if (property0.NameEquals("status"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + property0.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property1 in property0.Value.EnumerateObject()) + { + if (property1.NameEquals("state"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + state = new AutoExportStatusType(property1.Value.GetString()); + continue; + } + if (property1.NameEquals("statusCode"u8)) + { + statusCode = property1.Value.GetString(); + continue; + } + if (property1.NameEquals("statusMessage"u8)) + { + statusMessage = property1.Value.GetString(); + continue; + } + if (property1.NameEquals("totalFilesExported"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalFilesExported = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("totalMiBExported"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalMiBExported = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("totalFilesFailed"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalFilesFailed = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("exportIterationCount"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exportIterationCount = 
property1.Value.GetInt32(); + continue; + } + if (property1.NameEquals("lastSuccessfulIterationCompletionTimeUTC"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastSuccessfulIterationCompletionTimeUTC = property1.Value.GetDateTimeOffset("O"); + continue; + } + if (property1.NameEquals("currentIterationFilesDiscovered"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + currentIterationFilesDiscovered = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("currentIterationMiBDiscovered"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + currentIterationMiBDiscovered = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("currentIterationFilesExported"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + currentIterationFilesExported = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("currentIterationMiBExported"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + currentIterationMiBExported = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("currentIterationFilesFailed"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + currentIterationFilesFailed = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("lastStartedTimeUTC"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastStartedTimeUTC = property1.Value.GetDateTimeOffset("O"); + continue; + } + if (property1.NameEquals("lastCompletionTimeUTC"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastCompletionTimeUTC = property1.Value.GetDateTimeOffset("O"); + continue; + } + } + continue; + } + } + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutoExportJobData( + id, + name, + type, + systemData, + tags ?? new ChangeTrackingDictionary(), + location, + provisioningState, + adminStatus, + autoExportPrefixes ?? new ChangeTrackingList(), + state, + statusCode, + statusMessage, + totalFilesExported, + totalMiBExported, + totalFilesFailed, + exportIterationCount, + lastSuccessfulIterationCompletionTimeUTC, + currentIterationFilesDiscovered, + currentIterationMiBDiscovered, + currentIterationFilesExported, + currentIterationMiBExported, + currentIterationFilesFailed, + lastStartedTimeUTC, + lastCompletionTimeUTC, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutoExportJobData)} does not support writing '{options.Format}' format."); + } + } + + AutoExportJobData IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutoExportJobData(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutoExportJobData)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobData.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobData.cs new file mode 100644 index 000000000000..7ba52bec1f36 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobData.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core; +using Azure.ResourceManager.Models; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + /// + /// A class representing the AutoExportJob data model. + /// An auto export job instance. Follows Azure Resource Manager standards: https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/resource-api-reference.md + /// + public partial class AutoExportJobData : TrackedResourceData + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The location. + public AutoExportJobData(AzureLocation location) : base(location) + { + AutoExportPrefixes = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The id. + /// The name. + /// The resourceType. + /// The systemData. + /// The tags. + /// The location. + /// ARM provisioning state. + /// The administrative status of the auto export job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto export job. By default it is set to 'Enable'. + /// An array of blob paths/prefixes that get auto exported to the cluster namespace. It has '/' as the default value. Number of maximum allowed paths for now is 1. + /// The operational state of auto export. InProgress indicates the export is running. Disabling indicates the user has requested to disable the export but the disabling is still in progress. Disabled indicates auto export has been disabled. DisableFailed indicates the disabling has failed. Failed means the export was unable to continue, due to a fatal error. + /// Server-defined status code for auto export job. + /// Server-defined status message for auto export job. + /// Total files exported since the start of the export. 
This is accumulative, some files may be counted repeatedly. + /// Total data (in MiB) exported since the start of the export. This is accumulative, some files may be counted repeatedly. + /// Total files failed to be export since the last successfully completed iteration. This is accumulative, some files may be counted repeatedly. + /// Number of iterations completed since the start of the export. + /// Time (in UTC) of the last successfully completed export iteration. Look at logging container for details. + /// Files discovered for export in current iteration. It may increase while more export items are found. + /// Data (in MiB) discovered for export in current iteration. It may increase while more export items are found. + /// Files that have been exported in current iteration. + /// Data (in MiB) that have been exported in current iteration. + /// Files failed to export in current iteration. + /// The time (in UTC) the latest auto export job started. + /// The time (in UTC) of the last completed auto export job. + /// Keeps track of any properties unknown to the library. + internal AutoExportJobData(ResourceIdentifier id, string name, ResourceType resourceType, SystemData systemData, IDictionary tags, AzureLocation location, AutoExportJobProvisioningStateType? provisioningState, AutoExportJobAdminStatus? adminStatus, IList autoExportPrefixes, AutoExportStatusType? state, string statusCode, string statusMessage, long? totalFilesExported, long? totalMiBExported, long? totalFilesFailed, int? exportIterationCount, DateTimeOffset? lastSuccessfulIterationCompletionTimeUTC, long? currentIterationFilesDiscovered, long? currentIterationMiBDiscovered, long? currentIterationFilesExported, long? currentIterationMiBExported, long? currentIterationFilesFailed, DateTimeOffset? lastStartedTimeUTC, DateTimeOffset? lastCompletionTimeUTC, IDictionary serializedAdditionalRawData) : base(id, name, resourceType, systemData, tags, location) + { + ProvisioningState = provisioningState; + AdminStatus = adminStatus; + AutoExportPrefixes = autoExportPrefixes; + State = state; + StatusCode = statusCode; + StatusMessage = statusMessage; + TotalFilesExported = totalFilesExported; + TotalMiBExported = totalMiBExported; + TotalFilesFailed = totalFilesFailed; + ExportIterationCount = exportIterationCount; + LastSuccessfulIterationCompletionTimeUTC = lastSuccessfulIterationCompletionTimeUTC; + CurrentIterationFilesDiscovered = currentIterationFilesDiscovered; + CurrentIterationMiBDiscovered = currentIterationMiBDiscovered; + CurrentIterationFilesExported = currentIterationFilesExported; + CurrentIterationMiBExported = currentIterationMiBExported; + CurrentIterationFilesFailed = currentIterationFilesFailed; + LastStartedTimeUTC = lastStartedTimeUTC; + LastCompletionTimeUTC = lastCompletionTimeUTC; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AutoExportJobData() + { + } + + /// ARM provisioning state. + public AutoExportJobProvisioningStateType? ProvisioningState { get; } + /// The administrative status of the auto export job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto export job. By default it is set to 'Enable'. + public AutoExportJobAdminStatus? AdminStatus { get; set; } + /// An array of blob paths/prefixes that get auto exported to the cluster namespace. It has '/' as the default value. Number of maximum allowed paths for now is 1. 
+ public IList AutoExportPrefixes { get; } + /// The operational state of auto export. InProgress indicates the export is running. Disabling indicates the user has requested to disable the export but the disabling is still in progress. Disabled indicates auto export has been disabled. DisableFailed indicates the disabling has failed. Failed means the export was unable to continue, due to a fatal error. + public AutoExportStatusType? State { get; set; } + /// Server-defined status code for auto export job. + public string StatusCode { get; } + /// Server-defined status message for auto export job. + public string StatusMessage { get; } + /// Total files exported since the start of the export. This is accumulative, some files may be counted repeatedly. + public long? TotalFilesExported { get; } + /// Total data (in MiB) exported since the start of the export. This is accumulative, some files may be counted repeatedly. + public long? TotalMiBExported { get; } + /// Total files failed to be export since the last successfully completed iteration. This is accumulative, some files may be counted repeatedly. + public long? TotalFilesFailed { get; } + /// Number of iterations completed since the start of the export. + public int? ExportIterationCount { get; } + /// Time (in UTC) of the last successfully completed export iteration. Look at logging container for details. + public DateTimeOffset? LastSuccessfulIterationCompletionTimeUTC { get; } + /// Files discovered for export in current iteration. It may increase while more export items are found. + public long? CurrentIterationFilesDiscovered { get; } + /// Data (in MiB) discovered for export in current iteration. It may increase while more export items are found. + public long? CurrentIterationMiBDiscovered { get; } + /// Files that have been exported in current iteration. + public long? CurrentIterationFilesExported { get; } + /// Data (in MiB) that have been exported in current iteration. + public long? CurrentIterationMiBExported { get; } + /// Files failed to export in current iteration. + public long? CurrentIterationFilesFailed { get; } + /// The time (in UTC) the latest auto export job started. + public DateTimeOffset? LastStartedTimeUTC { get; } + /// The time (in UTC) of the last completed auto export job. + public DateTimeOffset? LastCompletionTimeUTC { get; } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobResource.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobResource.Serialization.cs new file mode 100644 index 000000000000..ee0e342a3a90 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobResource.Serialization.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
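The AutoExportJobData model above follows the standard tracked-resource shape: the public constructor takes only a location, AdminStatus is the main writable control ('Enable' keeps the export running, 'Disable' stops the active job), and AutoExportPrefixes is a get-only list that currently accepts at most one prefix. A minimal sketch of building such a payload before handing it to the matching create-or-update call; the location and prefix are placeholders, and AutoExportJobAdminStatus.Enable is assumed to be the static member exposed for the documented 'Enable' value:

using Azure.Core;
using Azure.ResourceManager.StorageCache;
using Azure.ResourceManager.StorageCache.Models;

// Payload for a new auto export job (placeholder values).
AutoExportJobData exportJobData = new AutoExportJobData(AzureLocation.EastUS)
{
    // 'Enable' starts (or keeps) the export; 'Disable' stops the currently active job.
    AdminStatus = AutoExportJobAdminStatus.Enable,
};

// Only one prefix is currently allowed; "/" exports the whole namespace.
exportJobData.AutoExportPrefixes.Add("/");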
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; + +namespace Azure.ResourceManager.StorageCache +{ + public partial class AutoExportJobResource : IJsonModel + { + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) => ((IJsonModel)Data).Write(writer, options); + + AutoExportJobData IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => ((IJsonModel)Data).Create(ref reader, options); + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => ModelReaderWriter.Write(Data, options); + + AutoExportJobData IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => ModelReaderWriter.Read(data, options); + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => ((IPersistableModel)Data).GetFormatFromOptions(options); + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobResource.cs new file mode 100644 index 000000000000..641597a5cbaf --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoExportJobResource.cs @@ -0,0 +1,707 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + /// + /// A Class representing an AutoExportJob along with the instance operations that can be performed on it. + /// If you have a you can construct an + /// from an instance of using the GetAutoExportJobResource method. + /// Otherwise you can get one from its parent resource using the GetAutoExportJob method. + /// + public partial class AutoExportJobResource : ArmResource + { + /// Generate the resource identifier of a instance. + /// The subscriptionId. + /// The resourceGroupName. + /// The amlFileSystemName. + /// The autoExportJobName. + public static ResourceIdentifier CreateResourceIdentifier(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName) + { + var resourceId = $"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFileSystemName}/autoExportJobs/{autoExportJobName}"; + return new ResourceIdentifier(resourceId); + } + + private readonly ClientDiagnostics _autoExportJobautoExportJobsClientDiagnostics; + private readonly AutoExportJobsRestOperations _autoExportJobautoExportJobsRestClient; + private readonly AutoExportJobData _data; + + /// Gets the resource type for the operations. + public static readonly ResourceType ResourceType = "Microsoft.StorageCache/amlFilesystems/autoExportJobs"; + + /// Initializes a new instance of the class for mocking. + protected AutoExportJobResource() + { + } + + /// Initializes a new instance of the class. + /// The client parameters to use in these operations. + /// The resource that is the target of operations. + internal AutoExportJobResource(ArmClient client, AutoExportJobData data) : this(client, data.Id) + { + HasData = true; + _data = data; + } + + /// Initializes a new instance of the class. + /// The client parameters to use in these operations. 
+ /// The identifier of the resource that is the target of operations. + internal AutoExportJobResource(ArmClient client, ResourceIdentifier id) : base(client, id) + { + _autoExportJobautoExportJobsClientDiagnostics = new ClientDiagnostics("Azure.ResourceManager.StorageCache", ResourceType.Namespace, Diagnostics); + TryGetApiVersion(ResourceType, out string autoExportJobautoExportJobsApiVersion); + _autoExportJobautoExportJobsRestClient = new AutoExportJobsRestOperations(Pipeline, Diagnostics.ApplicationId, Endpoint, autoExportJobautoExportJobsApiVersion); +#if DEBUG + ValidateResourceId(Id); +#endif + } + + /// Gets whether or not the current instance has data. + public virtual bool HasData { get; } + + /// Gets the data representing this Feature. + /// Throws if there is no data loaded in the current instance. + public virtual AutoExportJobData Data + { + get + { + if (!HasData) + throw new InvalidOperationException("The current instance does not have data, you must call Get first."); + return _data; + } + } + + internal static void ValidateResourceId(ResourceIdentifier id) + { + if (id.ResourceType != ResourceType) + throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Invalid resource type {0} expected {1}", id.ResourceType, ResourceType), nameof(id)); + } + + /// + /// Returns an auto export job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + public virtual async Task> GetAsync(CancellationToken cancellationToken = default) + { + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.Get"); + scope.Start(); + try + { + var response = await _autoExportJobautoExportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoExportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns an auto export job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + public virtual Response Get(CancellationToken cancellationToken = default) + { + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.Get"); + scope.Start(); + try + { + var response = _autoExportJobautoExportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoExportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Schedules an auto export job for deletion. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Delete + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The cancellation token to use. + public virtual async Task DeleteAsync(WaitUntil waitUntil, CancellationToken cancellationToken = default) + { + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.Delete"); + scope.Start(); + try + { + var response = await _autoExportJobautoExportJobsRestClient.DeleteAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + var operation = new StorageCacheArmOperation(_autoExportJobautoExportJobsClientDiagnostics, Pipeline, _autoExportJobautoExportJobsRestClient.CreateDeleteRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name).Request, response, OperationFinalStateVia.Location); + if (waitUntil == WaitUntil.Completed) + await operation.WaitForCompletionResponseAsync(cancellationToken).ConfigureAwait(false); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Schedules an auto export job for deletion. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Delete + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The cancellation token to use. + public virtual ArmOperation Delete(WaitUntil waitUntil, CancellationToken cancellationToken = default) + { + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.Delete"); + scope.Start(); + try + { + var response = _autoExportJobautoExportJobsRestClient.Delete(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + var operation = new StorageCacheArmOperation(_autoExportJobautoExportJobsClientDiagnostics, Pipeline, _autoExportJobautoExportJobsRestClient.CreateDeleteRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name).Request, response, OperationFinalStateVia.Location); + if (waitUntil == WaitUntil.Completed) + operation.WaitForCompletionResponse(cancellationToken); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Update an auto export job instance. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Update + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is null. + public virtual async Task> UpdateAsync(WaitUntil waitUntil, AutoExportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(patch, nameof(patch)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.Update"); + scope.Start(); + try + { + var response = await _autoExportJobautoExportJobsRestClient.UpdateAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch, cancellationToken).ConfigureAwait(false); + var operation = new StorageCacheArmOperation(new AutoExportJobOperationSource(Client), _autoExportJobautoExportJobsClientDiagnostics, Pipeline, _autoExportJobautoExportJobsRestClient.CreateUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + await operation.WaitForCompletionAsync(cancellationToken).ConfigureAwait(false); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Update an auto export job instance. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Update + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is null. 
+ public virtual ArmOperation Update(WaitUntil waitUntil, AutoExportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(patch, nameof(patch)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.Update"); + scope.Start(); + try + { + var response = _autoExportJobautoExportJobsRestClient.Update(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch, cancellationToken); + var operation = new StorageCacheArmOperation(new AutoExportJobOperationSource(Client), _autoExportJobautoExportJobsClientDiagnostics, Pipeline, _autoExportJobautoExportJobsRestClient.CreateUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + operation.WaitForCompletion(cancellationToken); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Add a tag to the current resource. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. + /// The value for the tag. + /// The cancellation token to use. + /// or is null. + public virtual async Task> AddTagAsync(string key, string value, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + Argument.AssertNotNull(value, nameof(value)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.AddTag"); + scope.Start(); + try + { + if (await CanUseTagResourceAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) + { + var originalTags = await GetTagResource().GetAsync(cancellationToken).ConfigureAwait(false); + originalTags.Value.Data.TagValues[key] = value; + await GetTagResource().CreateOrUpdateAsync(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalResponse = await _autoExportJobautoExportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + return Response.FromValue(new AutoExportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = (await GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false)).Value.Data; + var patch = new AutoExportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags[key] = value; + var result = await UpdateAsync(WaitUntil.Completed, patch, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Add a tag to the current resource. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. 
+ /// The value for the tag. + /// The cancellation token to use. + /// or is null. + public virtual Response AddTag(string key, string value, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + Argument.AssertNotNull(value, nameof(value)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.AddTag"); + scope.Start(); + try + { + if (CanUseTagResource(cancellationToken: cancellationToken)) + { + var originalTags = GetTagResource().Get(cancellationToken); + originalTags.Value.Data.TagValues[key] = value; + GetTagResource().CreateOrUpdate(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken); + var originalResponse = _autoExportJobautoExportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + return Response.FromValue(new AutoExportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = Get(cancellationToken: cancellationToken).Value.Data; + var patch = new AutoExportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags[key] = value; + var result = Update(WaitUntil.Completed, patch, cancellationToken: cancellationToken); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Replace the tags on the resource with the given set. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The set of tags to use as replacement. + /// The cancellation token to use. + /// is null. 
+ public virtual async Task> SetTagsAsync(IDictionary tags, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(tags, nameof(tags)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.SetTags"); + scope.Start(); + try + { + if (await CanUseTagResourceAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) + { + await GetTagResource().DeleteAsync(WaitUntil.Completed, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalTags = await GetTagResource().GetAsync(cancellationToken).ConfigureAwait(false); + originalTags.Value.Data.TagValues.ReplaceWith(tags); + await GetTagResource().CreateOrUpdateAsync(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalResponse = await _autoExportJobautoExportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + return Response.FromValue(new AutoExportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = (await GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false)).Value.Data; + var patch = new AutoExportJobPatch(); + patch.Tags.ReplaceWith(tags); + var result = await UpdateAsync(WaitUntil.Completed, patch, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Replace the tags on the resource with the given set. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The set of tags to use as replacement. + /// The cancellation token to use. + /// is null. + public virtual Response SetTags(IDictionary tags, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(tags, nameof(tags)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.SetTags"); + scope.Start(); + try + { + if (CanUseTagResource(cancellationToken: cancellationToken)) + { + GetTagResource().Delete(WaitUntil.Completed, cancellationToken: cancellationToken); + var originalTags = GetTagResource().Get(cancellationToken); + originalTags.Value.Data.TagValues.ReplaceWith(tags); + GetTagResource().CreateOrUpdate(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken); + var originalResponse = _autoExportJobautoExportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + return Response.FromValue(new AutoExportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = Get(cancellationToken: cancellationToken).Value.Data; + var patch = new AutoExportJobPatch(); + patch.Tags.ReplaceWith(tags); + var result = Update(WaitUntil.Completed, patch, cancellationToken: cancellationToken); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Removes a tag by key from the resource. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. + /// The cancellation token to use. + /// is null. + public virtual async Task> RemoveTagAsync(string key, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.RemoveTag"); + scope.Start(); + try + { + if (await CanUseTagResourceAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) + { + var originalTags = await GetTagResource().GetAsync(cancellationToken).ConfigureAwait(false); + originalTags.Value.Data.TagValues.Remove(key); + await GetTagResource().CreateOrUpdateAsync(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalResponse = await _autoExportJobautoExportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + return Response.FromValue(new AutoExportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = (await GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false)).Value.Data; + var patch = new AutoExportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags.Remove(key); + var result = await UpdateAsync(WaitUntil.Completed, patch, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Removes a tag by key from the resource. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoExportJobs/{autoExportJobName} + /// + /// + /// Operation Id + /// autoExportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. + /// The cancellation token to use. + /// is null. 
+ public virtual Response RemoveTag(string key, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + + using var scope = _autoExportJobautoExportJobsClientDiagnostics.CreateScope("AutoExportJobResource.RemoveTag"); + scope.Start(); + try + { + if (CanUseTagResource(cancellationToken: cancellationToken)) + { + var originalTags = GetTagResource().Get(cancellationToken); + originalTags.Value.Data.TagValues.Remove(key); + GetTagResource().CreateOrUpdate(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken); + var originalResponse = _autoExportJobautoExportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + return Response.FromValue(new AutoExportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = Get(cancellationToken: cancellationToken).Value.Data; + var patch = new AutoExportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags.Remove(key); + var result = Update(WaitUntil.Completed, patch, cancellationToken: cancellationToken); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobCollection.cs new file mode 100644 index 000000000000..f071a5f0e308 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobCollection.cs @@ -0,0 +1,493 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using Autorest.CSharp.Core; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.ResourceManager.StorageCache +{ + /// + /// A class representing a collection of and their operations. + /// Each in the collection will belong to the same instance of . + /// To get an instance call the GetAutoImportJobs method from an instance of . + /// + public partial class AutoImportJobCollection : ArmCollection, IEnumerable, IAsyncEnumerable + { + private readonly ClientDiagnostics _autoImportJobautoImportJobsClientDiagnostics; + private readonly AutoImportJobsRestOperations _autoImportJobautoImportJobsRestClient; + + /// Initializes a new instance of the class for mocking. + protected AutoImportJobCollection() + { + } + + /// Initializes a new instance of the class. + /// The client parameters to use in these operations. + /// The identifier of the parent resource that is the target of operations. 
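As the class remarks note, an existing AutoExportJobResource can be addressed directly from an ArmClient via GetAutoExportJobResource once its identifier is known, after which the Get, Update, Delete, and tag operations above apply. A hedged sketch; the subscription, resource group, file system, and job names are placeholders:

using Azure;
using Azure.Core;
using Azure.Identity;
using Azure.ResourceManager;
using Azure.ResourceManager.StorageCache;

ArmClient client = new ArmClient(new DefaultAzureCredential());

// Address an existing auto export job by its ARM resource id.
ResourceIdentifier jobId = AutoExportJobResource.CreateResourceIdentifier(
    "<subscription-id>", "<resource-group>", "<aml-filesystem>", "<auto-export-job>");
AutoExportJobResource exportJob = client.GetAutoExportJobResource(jobId);

// Fetch the current state, tag it, then schedule the job for deletion.
AutoExportJobResource current = (await exportJob.GetAsync()).Value;
await current.AddTagAsync("owner", "storage-team");
await current.DeleteAsync(WaitUntil.Completed);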
+ internal AutoImportJobCollection(ArmClient client, ResourceIdentifier id) : base(client, id) + { + _autoImportJobautoImportJobsClientDiagnostics = new ClientDiagnostics("Azure.ResourceManager.StorageCache", AutoImportJobResource.ResourceType.Namespace, Diagnostics); + TryGetApiVersion(AutoImportJobResource.ResourceType, out string autoImportJobautoImportJobsApiVersion); + _autoImportJobautoImportJobsRestClient = new AutoImportJobsRestOperations(Pipeline, Diagnostics.ApplicationId, Endpoint, autoImportJobautoImportJobsApiVersion); +#if DEBUG + ValidateResourceId(Id); +#endif + } + + internal static void ValidateResourceId(ResourceIdentifier id) + { + if (id.ResourceType != AmlFileSystemResource.ResourceType) + throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Invalid resource type {0} expected {1}", id.ResourceType, AmlFileSystemResource.ResourceType), nameof(id)); + } + + /// + /// Create or update an auto import job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_CreateOrUpdate + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto import job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// or is null. + public virtual async Task> CreateOrUpdateAsync(WaitUntil waitUntil, string autoImportJobName, AutoImportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.CreateOrUpdate"); + scope.Start(); + try + { + var response = await _autoImportJobautoImportJobsRestClient.CreateOrUpdateAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, data, cancellationToken).ConfigureAwait(false); + var operation = new StorageCacheArmOperation(new AutoImportJobOperationSource(Client), _autoImportJobautoImportJobsClientDiagnostics, Pipeline, _autoImportJobautoImportJobsRestClient.CreateCreateOrUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, data).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + await operation.WaitForCompletionAsync(cancellationToken).ConfigureAwait(false); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Create or update an auto import job. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_CreateOrUpdate + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto import job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// or is null. + public virtual ArmOperation CreateOrUpdate(WaitUntil waitUntil, string autoImportJobName, AutoImportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.CreateOrUpdate"); + scope.Start(); + try + { + var response = _autoImportJobautoImportJobsRestClient.CreateOrUpdate(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, data, cancellationToken); + var operation = new StorageCacheArmOperation(new AutoImportJobOperationSource(Client), _autoImportJobautoImportJobsClientDiagnostics, Pipeline, _autoImportJobautoImportJobsRestClient.CreateCreateOrUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, data).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + operation.WaitForCompletion(cancellationToken); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns an auto import job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. 
+ public virtual async Task> GetAsync(string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.Get"); + scope.Start(); + try + { + var response = await _autoImportJobautoImportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, cancellationToken).ConfigureAwait(false); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoImportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns an auto import job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual Response Get(string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.Get"); + scope.Start(); + try + { + var response = _autoImportJobautoImportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, cancellationToken); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoImportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns all the auto import jobs the user has access to under an AML File System. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs + /// + /// + /// Operation Id + /// autoImportJobs_ListByAmlFileSystem + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + /// An async collection of that may take multiple service requests to iterate over. + public virtual AsyncPageable GetAllAsync(CancellationToken cancellationToken = default) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => _autoImportJobautoImportJobsRestClient.CreateListByAmlFileSystemRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + HttpMessage NextPageRequest(int? 
pageSizeHint, string nextLink) => _autoImportJobautoImportJobsRestClient.CreateListByAmlFileSystemNextPageRequest(nextLink, Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, NextPageRequest, e => new AutoImportJobResource(Client, AutoImportJobData.DeserializeAutoImportJobData(e)), _autoImportJobautoImportJobsClientDiagnostics, Pipeline, "AutoImportJobCollection.GetAll", "value", "nextLink", cancellationToken); + } + + /// + /// Returns all the auto import jobs the user has access to under an AML File System. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs + /// + /// + /// Operation Id + /// autoImportJobs_ListByAmlFileSystem + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + /// A collection of that may take multiple service requests to iterate over. + public virtual Pageable GetAll(CancellationToken cancellationToken = default) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => _autoImportJobautoImportJobsRestClient.CreateListByAmlFileSystemRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + HttpMessage NextPageRequest(int? pageSizeHint, string nextLink) => _autoImportJobautoImportJobsRestClient.CreateListByAmlFileSystemNextPageRequest(nextLink, Id.SubscriptionId, Id.ResourceGroupName, Id.Name); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, NextPageRequest, e => new AutoImportJobResource(Client, AutoImportJobData.DeserializeAutoImportJobData(e)), _autoImportJobautoImportJobsClientDiagnostics, Pipeline, "AutoImportJobCollection.GetAll", "value", "nextLink", cancellationToken); + } + + /// + /// Checks to see if the resource exists in azure. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual async Task> ExistsAsync(string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.Exists"); + scope.Start(); + try + { + var response = await _autoImportJobautoImportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(response.Value != null, response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Checks to see if the resource exists in azure. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual Response Exists(string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.Exists"); + scope.Start(); + try + { + var response = _autoImportJobautoImportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, cancellationToken: cancellationToken); + return Response.FromValue(response.Value != null, response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Tries to get details for this resource from the service. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. + public virtual async Task> GetIfExistsAsync(string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.GetIfExists"); + scope.Start(); + try + { + var response = await _autoImportJobautoImportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, cancellationToken: cancellationToken).ConfigureAwait(false); + if (response.Value == null) + return new NoValueResponse(response.GetRawResponse()); + return Response.FromValue(new AutoImportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Tries to get details for this resource from the service. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// is an empty string, and was expected to be non-empty. + /// is null. 
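The collection follows the usual ArmCollection pattern: CreateOrUpdate takes the job name plus an AutoImportJobData payload, Exists and GetIfExists probe by name, and GetAll/GetAllAsync page through every auto import job under the AML file system. A sketch assuming the parent is reached through the standard GetAmlFileSystemResource extension and its CreateResourceIdentifier overload; all names below are placeholders:

using System;
using Azure;
using Azure.Core;
using Azure.Identity;
using Azure.ResourceManager;
using Azure.ResourceManager.StorageCache;

ArmClient client = new ArmClient(new DefaultAzureCredential());

AmlFileSystemResource amlFileSystem = client.GetAmlFileSystemResource(
    AmlFileSystemResource.CreateResourceIdentifier("<subscription-id>", "<resource-group>", "<aml-filesystem>"));
AutoImportJobCollection importJobs = amlFileSystem.GetAutoImportJobs();

// Create (or update) an auto import job and wait for the long-running operation to finish.
AutoImportJobData importJobData = new AutoImportJobData(AzureLocation.EastUS);
importJobData.AutoImportPrefixes.Add("/");
ArmOperation<AutoImportJobResource> operation =
    await importJobs.CreateOrUpdateAsync(WaitUntil.Completed, "<auto-import-job>", importJobData);

// Enumerate every auto import job under this AML file system.
await foreach (AutoImportJobResource job in importJobs.GetAllAsync())
{
    Console.WriteLine(job.Data.Name);
}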
+ public virtual NullableResponse GetIfExists(string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobCollection.GetIfExists"); + scope.Start(); + try + { + var response = _autoImportJobautoImportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Name, autoImportJobName, cancellationToken: cancellationToken); + if (response.Value == null) + return new NoValueResponse(response.GetRawResponse()); + return Response.FromValue(new AutoImportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetAll().GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetAll().GetEnumerator(); + } + + IAsyncEnumerator IAsyncEnumerable.GetAsyncEnumerator(CancellationToken cancellationToken) + { + return GetAllAsync(cancellationToken: cancellationToken).GetAsyncEnumerator(cancellationToken); + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobData.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobData.Serialization.cs new file mode 100644 index 000000000000..1da4892033f7 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobData.Serialization.cs @@ -0,0 +1,586 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.ResourceManager.Models; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + public partial class AutoImportJobData : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobData)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("properties"u8); + writer.WriteStartObject(); + if (options.Format != "W" && Optional.IsDefined(ProvisioningState)) + { + writer.WritePropertyName("provisioningState"u8); + writer.WriteStringValue(ProvisioningState.Value.ToString()); + } + if (Optional.IsDefined(AdminStatus)) + { + writer.WritePropertyName("adminStatus"u8); + writer.WriteStringValue(AdminStatus.Value.ToString()); + } + if (Optional.IsCollectionDefined(AutoImportPrefixes)) + { + writer.WritePropertyName("autoImportPrefixes"u8); + writer.WriteStartArray(); + foreach (var item in AutoImportPrefixes) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(ConflictResolutionMode)) + { + writer.WritePropertyName("conflictResolutionMode"u8); + writer.WriteStringValue(ConflictResolutionMode.Value.ToString()); + } + if (Optional.IsDefined(EnableDeletions)) + { + writer.WritePropertyName("enableDeletions"u8); + writer.WriteBooleanValue(EnableDeletions.Value); + } + if (Optional.IsDefined(MaximumErrors)) + { + writer.WritePropertyName("maximumErrors"u8); + writer.WriteNumberValue(MaximumErrors.Value); + } + writer.WritePropertyName("status"u8); + writer.WriteStartObject(); + if (options.Format != "W" && Optional.IsDefined(State)) + { + writer.WritePropertyName("state"u8); + writer.WriteStringValue(State.Value.ToString()); + } + if (options.Format != "W" && Optional.IsDefined(ScanStartOn)) + { + writer.WritePropertyName("scanStartTime"u8); + writer.WriteStringValue(ScanStartOn.Value, "O"); + } + if (options.Format != "W" && Optional.IsDefined(ScanEndOn)) + { + writer.WritePropertyName("scanEndTime"u8); + writer.WriteStringValue(ScanEndOn.Value, "O"); + } + if (options.Format != "W" && Optional.IsDefined(TotalBlobsWalked)) + { + writer.WritePropertyName("totalBlobsWalked"u8); + writer.WriteNumberValue(TotalBlobsWalked.Value); + } + if (options.Format != "W" && Optional.IsDefined(RateOfBlobWalk)) + { + writer.WritePropertyName("rateOfBlobWalk"u8); + writer.WriteNumberValue(RateOfBlobWalk.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalBlobsImported)) + { + writer.WritePropertyName("totalBlobsImported"u8); + writer.WriteNumberValue(TotalBlobsImported.Value); + } + if (options.Format != "W" && Optional.IsDefined(RateOfBlobImport)) + { + writer.WritePropertyName("rateOfBlobImport"u8); + writer.WriteNumberValue(RateOfBlobImport.Value); + } + if (options.Format != "W" && Optional.IsDefined(ImportedFiles)) + { + writer.WritePropertyName("importedFiles"u8); + writer.WriteNumberValue(ImportedFiles.Value); + } + if (options.Format != "W" && Optional.IsDefined(ImportedDirectories)) + { + writer.WritePropertyName("importedDirectories"u8); + writer.WriteNumberValue(ImportedDirectories.Value); + } + if (options.Format != "W" && Optional.IsDefined(ImportedSymlinks)) + { + writer.WritePropertyName("importedSymlinks"u8); + writer.WriteNumberValue(ImportedSymlinks.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingFiles)) + { + writer.WritePropertyName("preexistingFiles"u8); + writer.WriteNumberValue(PreexistingFiles.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingDirectories)) + { + writer.WritePropertyName("preexistingDirectories"u8); + 
writer.WriteNumberValue(PreexistingDirectories.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingSymlinks)) + { + writer.WritePropertyName("preexistingSymlinks"u8); + writer.WriteNumberValue(PreexistingSymlinks.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalErrors)) + { + writer.WritePropertyName("totalErrors"u8); + writer.WriteNumberValue(TotalErrors.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalConflicts)) + { + writer.WritePropertyName("totalConflicts"u8); + writer.WriteNumberValue(TotalConflicts.Value); + } + if (options.Format != "W" && Optional.IsDefined(BlobSyncEvents)) + { + writer.WritePropertyName("blobSyncEvents"u8); + writer.WriteObjectValue(BlobSyncEvents, options); + } + if (options.Format != "W" && Optional.IsDefined(LastStartedTimeUTC)) + { + writer.WritePropertyName("lastStartedTimeUTC"u8); + writer.WriteStringValue(LastStartedTimeUTC.Value, "O"); + } + if (options.Format != "W" && Optional.IsDefined(LastCompletionTimeUTC)) + { + writer.WritePropertyName("lastCompletionTimeUTC"u8); + writer.WriteStringValue(LastCompletionTimeUTC.Value, "O"); + } + writer.WriteEndObject(); + writer.WriteEndObject(); + } + + AutoImportJobData IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobData)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutoImportJobData(document.RootElement, options); + } + + internal static AutoImportJobData DeserializeAutoImportJobData(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary tags = default; + AzureLocation location = default; + ResourceIdentifier id = default; + string name = default; + ResourceType type = default; + SystemData systemData = default; + AutoImportJobPropertiesProvisioningState? provisioningState = default; + AutoImportJobPropertiesAdminStatus? adminStatus = default; + IList autoImportPrefixes = default; + ConflictResolutionMode? conflictResolutionMode = default; + bool? enableDeletions = default; + long? maximumErrors = default; + AutoImportJobState? state = default; + DateTimeOffset? scanStartTime = default; + DateTimeOffset? scanEndTime = default; + long? totalBlobsWalked = default; + long? rateOfBlobWalk = default; + long? totalBlobsImported = default; + long? rateOfBlobImport = default; + long? importedFiles = default; + long? importedDirectories = default; + long? importedSymlinks = default; + long? preexistingFiles = default; + long? preexistingDirectories = default; + long? preexistingSymlinks = default; + long? totalErrors = default; + long? totalConflicts = default; + AutoImportJobPropertiesStatusBlobSyncEvents blobSyncEvents = default; + DateTimeOffset? lastStartedTimeUTC = default; + DateTimeOffset? 
lastCompletionTimeUTC = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tags"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetString()); + } + tags = dictionary; + continue; + } + if (property.NameEquals("location"u8)) + { + location = new AzureLocation(property.Value.GetString()); + continue; + } + if (property.NameEquals("id"u8)) + { + id = new ResourceIdentifier(property.Value.GetString()); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("type"u8)) + { + type = new ResourceType(property.Value.GetString()); + continue; + } + if (property.NameEquals("systemData"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + systemData = JsonSerializer.Deserialize(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("properties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("provisioningState"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + provisioningState = new AutoImportJobPropertiesProvisioningState(property0.Value.GetString()); + continue; + } + if (property0.NameEquals("adminStatus"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + adminStatus = new AutoImportJobPropertiesAdminStatus(property0.Value.GetString()); + continue; + } + if (property0.NameEquals("autoImportPrefixes"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property0.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + autoImportPrefixes = array; + continue; + } + if (property0.NameEquals("conflictResolutionMode"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + conflictResolutionMode = new ConflictResolutionMode(property0.Value.GetString()); + continue; + } + if (property0.NameEquals("enableDeletions"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableDeletions = property0.Value.GetBoolean(); + continue; + } + if (property0.NameEquals("maximumErrors"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maximumErrors = property0.Value.GetInt64(); + continue; + } + if (property0.NameEquals("status"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + property0.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property1 in property0.Value.EnumerateObject()) + { + if (property1.NameEquals("state"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + state = new AutoImportJobState(property1.Value.GetString()); + continue; + } + if (property1.NameEquals("scanStartTime"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + scanStartTime = property1.Value.GetDateTimeOffset("O"); + continue; + } + if (property1.NameEquals("scanEndTime"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + 
scanEndTime = property1.Value.GetDateTimeOffset("O"); + continue; + } + if (property1.NameEquals("totalBlobsWalked"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalBlobsWalked = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("rateOfBlobWalk"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rateOfBlobWalk = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("totalBlobsImported"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalBlobsImported = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("rateOfBlobImport"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rateOfBlobImport = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("importedFiles"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedFiles = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("importedDirectories"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedDirectories = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("importedSymlinks"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedSymlinks = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("preexistingFiles"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingFiles = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("preexistingDirectories"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingDirectories = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("preexistingSymlinks"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingSymlinks = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("totalErrors"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalErrors = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("totalConflicts"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalConflicts = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("blobSyncEvents"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + blobSyncEvents = AutoImportJobPropertiesStatusBlobSyncEvents.DeserializeAutoImportJobPropertiesStatusBlobSyncEvents(property1.Value, options); + continue; + } + if (property1.NameEquals("lastStartedTimeUTC"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastStartedTimeUTC = property1.Value.GetDateTimeOffset("O"); + continue; + } + if (property1.NameEquals("lastCompletionTimeUTC"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastCompletionTimeUTC = property1.Value.GetDateTimeOffset("O"); + continue; + } + } + continue; + } + } + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutoImportJobData( + id, + name, + type, + systemData, + tags ?? new ChangeTrackingDictionary(), + location, + provisioningState, + adminStatus, + autoImportPrefixes ?? 
new ChangeTrackingList(), + conflictResolutionMode, + enableDeletions, + maximumErrors, + state, + scanStartTime, + scanEndTime, + totalBlobsWalked, + rateOfBlobWalk, + totalBlobsImported, + rateOfBlobImport, + importedFiles, + importedDirectories, + importedSymlinks, + preexistingFiles, + preexistingDirectories, + preexistingSymlinks, + totalErrors, + totalConflicts, + blobSyncEvents, + lastStartedTimeUTC, + lastCompletionTimeUTC, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutoImportJobData)} does not support writing '{options.Format}' format."); + } + } + + AutoImportJobData IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutoImportJobData(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutoImportJobData)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobData.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobData.cs new file mode 100644 index 000000000000..349798ef0671 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobData.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core; +using Azure.ResourceManager.Models; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + /// + /// A class representing the AutoImportJob data model. + /// An auto import job instance. Follows Azure Resource Manager standards: https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/resource-api-reference.md + /// + public partial class AutoImportJobData : TrackedResourceData + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The location. + public AutoImportJobData(AzureLocation location) : base(location) + { + AutoImportPrefixes = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The id. + /// The name. 
+ /// The resourceType. + /// The systemData. + /// The tags. + /// The location. + /// ARM provisioning state. + /// The administrative status of the auto import job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto import job. By default it is set to 'Enable'. + /// An array of blob paths/prefixes that get auto imported to the cluster namespace. It has '/' as the default value. Number of maximum allowed paths is 100. + /// How the auto import job will handle conflicts. For example, if the auto import job is trying to bring in a directory, but a file is at that path, how it handles it. Fail indicates that the auto import job should stop immediately and not do anything with the conflict. Skip indicates that it should pass over the conflict. OverwriteIfDirty causes the auto import job to delete and re-import the file or directory if it is a conflicting type, is dirty, or is currently released. OverwriteAlways extends OverwriteIfDirty to include releasing files that had been restored but were not dirty. Please reference https://learn.microsoft.com/en-us/azure/azure-managed-lustre/blob-integration#conflict-resolution-mode for a thorough explanation of these resolution modes. + /// Whether or not to enable deletions during auto import. This only affects overwrite-dirty. + /// Total non-conflict-oriented errors (e.g., OS errors) Import will tolerate before exiting with failure. -1 means infinite. 0 means exit immediately on any error. + /// The state of the auto import operation. + /// Date and time of when the currently running full scan began. + /// Date and time of when the full scan ended. + /// Total number of blobs walked during full scan. + /// Rate of blobs walked during full scan. + /// Total number of blobs imported during full scan. + /// Rate of blob import during full scan. + /// Number of files imported during full scan. + /// Number of directories imported during full scan. + /// Number of symlinks imported during full scan. + /// Number of preexisting files during full scan. + /// Number of preexisting directories during full scan. + /// Number of preexisting symlinks during full scan. + /// Total errors encountered during full scan. + /// Total conflicts encountered during full scan. + /// The storage account blob change feed status of the auto import job. + /// The time (in UTC) the latest auto import job started. + /// The time (in UTC) of the last completed auto import job. + /// Keeps track of any properties unknown to the library. + internal AutoImportJobData(ResourceIdentifier id, string name, ResourceType resourceType, SystemData systemData, IDictionary tags, AzureLocation location, AutoImportJobPropertiesProvisioningState? provisioningState, AutoImportJobPropertiesAdminStatus? adminStatus, IList autoImportPrefixes, ConflictResolutionMode? conflictResolutionMode, bool? enableDeletions, long? maximumErrors, AutoImportJobState? state, DateTimeOffset? scanStartOn, DateTimeOffset? scanEndOn, long? totalBlobsWalked, long? rateOfBlobWalk, long? totalBlobsImported, long? rateOfBlobImport, long? importedFiles, long? importedDirectories, long? importedSymlinks, long? preexistingFiles, long? preexistingDirectories, long? preexistingSymlinks, long? totalErrors, long? totalConflicts, AutoImportJobPropertiesStatusBlobSyncEvents blobSyncEvents, DateTimeOffset? lastStartedTimeUTC, DateTimeOffset? 
lastCompletionTimeUTC, IDictionary serializedAdditionalRawData) : base(id, name, resourceType, systemData, tags, location) + { + ProvisioningState = provisioningState; + AdminStatus = adminStatus; + AutoImportPrefixes = autoImportPrefixes; + ConflictResolutionMode = conflictResolutionMode; + EnableDeletions = enableDeletions; + MaximumErrors = maximumErrors; + State = state; + ScanStartOn = scanStartOn; + ScanEndOn = scanEndOn; + TotalBlobsWalked = totalBlobsWalked; + RateOfBlobWalk = rateOfBlobWalk; + TotalBlobsImported = totalBlobsImported; + RateOfBlobImport = rateOfBlobImport; + ImportedFiles = importedFiles; + ImportedDirectories = importedDirectories; + ImportedSymlinks = importedSymlinks; + PreexistingFiles = preexistingFiles; + PreexistingDirectories = preexistingDirectories; + PreexistingSymlinks = preexistingSymlinks; + TotalErrors = totalErrors; + TotalConflicts = totalConflicts; + BlobSyncEvents = blobSyncEvents; + LastStartedTimeUTC = lastStartedTimeUTC; + LastCompletionTimeUTC = lastCompletionTimeUTC; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AutoImportJobData() + { + } + + /// ARM provisioning state. + public AutoImportJobPropertiesProvisioningState? ProvisioningState { get; } + /// The administrative status of the auto import job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto import job. By default it is set to 'Enable'. + public AutoImportJobPropertiesAdminStatus? AdminStatus { get; set; } + /// An array of blob paths/prefixes that get auto imported to the cluster namespace. It has '/' as the default value. Number of maximum allowed paths is 100. + public IList AutoImportPrefixes { get; } + /// How the auto import job will handle conflicts. For example, if the auto import job is trying to bring in a directory, but a file is at that path, how it handles it. Fail indicates that the auto import job should stop immediately and not do anything with the conflict. Skip indicates that it should pass over the conflict. OverwriteIfDirty causes the auto import job to delete and re-import the file or directory if it is a conflicting type, is dirty, or is currently released. OverwriteAlways extends OverwriteIfDirty to include releasing files that had been restored but were not dirty. Please reference https://learn.microsoft.com/en-us/azure/azure-managed-lustre/blob-integration#conflict-resolution-mode for a thorough explanation of these resolution modes. + public ConflictResolutionMode? ConflictResolutionMode { get; set; } + /// Whether or not to enable deletions during auto import. This only affects overwrite-dirty. + public bool? EnableDeletions { get; set; } + /// Total non-conflict-oriented errors (e.g., OS errors) Import will tolerate before exiting with failure. -1 means infinite. 0 means exit immediately on any error. + public long? MaximumErrors { get; set; } + /// The state of the auto import operation. + public AutoImportJobState? State { get; } + /// Date and time of when the currently running full scan began. + public DateTimeOffset? ScanStartOn { get; } + /// Date and time of when the full scan ended. + public DateTimeOffset? ScanEndOn { get; } + /// Total number of blobs walked during full scan. + public long? TotalBlobsWalked { get; } + /// Rate of blobs walked during full scan. + public long? RateOfBlobWalk { get; } + /// Total number of blobs imported during full scan. + public long? 
TotalBlobsImported { get; } + /// Rate of blob import during full scan. + public long? RateOfBlobImport { get; } + /// Number of files imported during full scan. + public long? ImportedFiles { get; } + /// Number of directories imported during full scan. + public long? ImportedDirectories { get; } + /// Number of symlinks imported during full scan. + public long? ImportedSymlinks { get; } + /// Number of preexisting files during full scan. + public long? PreexistingFiles { get; } + /// Number of preexisting directories during full scan. + public long? PreexistingDirectories { get; } + /// Number of preexisting symlinks during full scan. + public long? PreexistingSymlinks { get; } + /// Total errors encountered during full scan. + public long? TotalErrors { get; } + /// Total conflicts encountered during full scan. + public long? TotalConflicts { get; } + /// The storage account blob change feed status of the auto import job. + public AutoImportJobPropertiesStatusBlobSyncEvents BlobSyncEvents { get; } + /// The time (in UTC) the latest auto import job started. + public DateTimeOffset? LastStartedTimeUTC { get; } + /// The time (in UTC) of the last completed auto import job. + public DateTimeOffset? LastCompletionTimeUTC { get; } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobResource.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobResource.Serialization.cs new file mode 100644 index 000000000000..20fe5ffb81b1 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobResource.Serialization.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; + +namespace Azure.ResourceManager.StorageCache +{ + public partial class AutoImportJobResource : IJsonModel + { + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) => ((IJsonModel)Data).Write(writer, options); + + AutoImportJobData IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => ((IJsonModel)Data).Create(ref reader, options); + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => ModelReaderWriter.Write(Data, options); + + AutoImportJobData IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => ModelReaderWriter.Read(data, options); + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => ((IPersistableModel)Data).GetFormatFromOptions(options); + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobResource.cs new file mode 100644 index 000000000000..84930227011e --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/AutoImportJobResource.cs @@ -0,0 +1,707 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
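+// Usage sketch: the commented snippet below only illustrates how the new auto import job
+// surface exposed by this resource might be consumed; it is a rough sketch, not the
+// library's documented sample. The subscription, resource group, and file system names are
+// placeholders, and DefaultAzureCredential is assumed to come from Azure.Identity.
+//
+//   ArmClient client = new ArmClient(new DefaultAzureCredential());
+//   ResourceIdentifier fsId = AmlFileSystemResource.CreateResourceIdentifier(
+//       "<subscriptionId>", "<resourceGroupName>", "<amlFileSystemName>");
+//   AmlFileSystemResource fileSystem = client.GetAmlFileSystemResource(fsId);
+//
+//   // Enumerate the auto import jobs under the AML file system and print their state.
+//   AutoImportJobCollection jobs = fileSystem.GetAutoImportJobs();
+//   await foreach (AutoImportJobResource job in jobs.GetAllAsync())
+//   {
+//       Console.WriteLine($"{job.Data.Name}: {job.Data.State}");
+//   }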
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + /// + /// A Class representing an AutoImportJob along with the instance operations that can be performed on it. + /// If you have a you can construct an + /// from an instance of using the GetAutoImportJobResource method. + /// Otherwise you can get one from its parent resource using the GetAutoImportJob method. + /// + public partial class AutoImportJobResource : ArmResource + { + /// Generate the resource identifier of a instance. + /// The subscriptionId. + /// The resourceGroupName. + /// The amlFileSystemName. + /// The autoImportJobName. + public static ResourceIdentifier CreateResourceIdentifier(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName) + { + var resourceId = $"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFileSystemName}/autoImportJobs/{autoImportJobName}"; + return new ResourceIdentifier(resourceId); + } + + private readonly ClientDiagnostics _autoImportJobautoImportJobsClientDiagnostics; + private readonly AutoImportJobsRestOperations _autoImportJobautoImportJobsRestClient; + private readonly AutoImportJobData _data; + + /// Gets the resource type for the operations. + public static readonly ResourceType ResourceType = "Microsoft.StorageCache/amlFilesystems/autoImportJobs"; + + /// Initializes a new instance of the class for mocking. + protected AutoImportJobResource() + { + } + + /// Initializes a new instance of the class. + /// The client parameters to use in these operations. + /// The resource that is the target of operations. + internal AutoImportJobResource(ArmClient client, AutoImportJobData data) : this(client, data.Id) + { + HasData = true; + _data = data; + } + + /// Initializes a new instance of the class. + /// The client parameters to use in these operations. + /// The identifier of the resource that is the target of operations. + internal AutoImportJobResource(ArmClient client, ResourceIdentifier id) : base(client, id) + { + _autoImportJobautoImportJobsClientDiagnostics = new ClientDiagnostics("Azure.ResourceManager.StorageCache", ResourceType.Namespace, Diagnostics); + TryGetApiVersion(ResourceType, out string autoImportJobautoImportJobsApiVersion); + _autoImportJobautoImportJobsRestClient = new AutoImportJobsRestOperations(Pipeline, Diagnostics.ApplicationId, Endpoint, autoImportJobautoImportJobsApiVersion); +#if DEBUG + ValidateResourceId(Id); +#endif + } + + /// Gets whether or not the current instance has data. + public virtual bool HasData { get; } + + /// Gets the data representing this Feature. + /// Throws if there is no data loaded in the current instance. + public virtual AutoImportJobData Data + { + get + { + if (!HasData) + throw new InvalidOperationException("The current instance does not have data, you must call Get first."); + return _data; + } + } + + internal static void ValidateResourceId(ResourceIdentifier id) + { + if (id.ResourceType != ResourceType) + throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Invalid resource type {0} expected {1}", id.ResourceType, ResourceType), nameof(id)); + } + + /// + /// Returns an auto import job. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + public virtual async Task> GetAsync(CancellationToken cancellationToken = default) + { + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.Get"); + scope.Start(); + try + { + var response = await _autoImportJobautoImportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoImportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns an auto import job. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The cancellation token to use. + public virtual Response Get(CancellationToken cancellationToken = default) + { + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.Get"); + scope.Start(); + try + { + var response = _autoImportJobautoImportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + if (response.Value == null) + throw new RequestFailedException(response.GetRawResponse()); + return Response.FromValue(new AutoImportJobResource(Client, response.Value), response.GetRawResponse()); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Schedules an auto import job for deletion. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Delete + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The cancellation token to use. 
+ public virtual async Task DeleteAsync(WaitUntil waitUntil, CancellationToken cancellationToken = default) + { + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.Delete"); + scope.Start(); + try + { + var response = await _autoImportJobautoImportJobsRestClient.DeleteAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + var operation = new StorageCacheArmOperation(_autoImportJobautoImportJobsClientDiagnostics, Pipeline, _autoImportJobautoImportJobsRestClient.CreateDeleteRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name).Request, response, OperationFinalStateVia.Location); + if (waitUntil == WaitUntil.Completed) + await operation.WaitForCompletionResponseAsync(cancellationToken).ConfigureAwait(false); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Schedules an auto import job for deletion. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Delete + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The cancellation token to use. + public virtual ArmOperation Delete(WaitUntil waitUntil, CancellationToken cancellationToken = default) + { + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.Delete"); + scope.Start(); + try + { + var response = _autoImportJobautoImportJobsRestClient.Delete(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + var operation = new StorageCacheArmOperation(_autoImportJobautoImportJobsClientDiagnostics, Pipeline, _autoImportJobautoImportJobsRestClient.CreateDeleteRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name).Request, response, OperationFinalStateVia.Location); + if (waitUntil == WaitUntil.Completed) + operation.WaitForCompletionResponse(cancellationToken); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Update an auto import job instance. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Update + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Object containing the user-selectable properties of the auto import job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is null. 
+ public virtual async Task> UpdateAsync(WaitUntil waitUntil, AutoImportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(patch, nameof(patch)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.Update"); + scope.Start(); + try + { + var response = await _autoImportJobautoImportJobsRestClient.UpdateAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch, cancellationToken).ConfigureAwait(false); + var operation = new StorageCacheArmOperation(new AutoImportJobOperationSource(Client), _autoImportJobautoImportJobsClientDiagnostics, Pipeline, _autoImportJobautoImportJobsRestClient.CreateUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + await operation.WaitForCompletionAsync(cancellationToken).ConfigureAwait(false); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Update an auto import job instance. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Update + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Object containing the user-selectable properties of the auto import job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// is null. + public virtual ArmOperation Update(WaitUntil waitUntil, AutoImportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(patch, nameof(patch)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.Update"); + scope.Start(); + try + { + var response = _autoImportJobautoImportJobsRestClient.Update(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch, cancellationToken); + var operation = new StorageCacheArmOperation(new AutoImportJobOperationSource(Client), _autoImportJobautoImportJobsClientDiagnostics, Pipeline, _autoImportJobautoImportJobsRestClient.CreateUpdateRequest(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, patch).Request, response, OperationFinalStateVia.AzureAsyncOperation); + if (waitUntil == WaitUntil.Completed) + operation.WaitForCompletion(cancellationToken); + return operation; + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Add a tag to the current resource. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. + /// The value for the tag. + /// The cancellation token to use. + /// or is null. 
+ public virtual async Task> AddTagAsync(string key, string value, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + Argument.AssertNotNull(value, nameof(value)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.AddTag"); + scope.Start(); + try + { + if (await CanUseTagResourceAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) + { + var originalTags = await GetTagResource().GetAsync(cancellationToken).ConfigureAwait(false); + originalTags.Value.Data.TagValues[key] = value; + await GetTagResource().CreateOrUpdateAsync(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalResponse = await _autoImportJobautoImportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + return Response.FromValue(new AutoImportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = (await GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false)).Value.Data; + var patch = new AutoImportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags[key] = value; + var result = await UpdateAsync(WaitUntil.Completed, patch, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Add a tag to the current resource. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. + /// The value for the tag. + /// The cancellation token to use. + /// or is null. + public virtual Response AddTag(string key, string value, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + Argument.AssertNotNull(value, nameof(value)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.AddTag"); + scope.Start(); + try + { + if (CanUseTagResource(cancellationToken: cancellationToken)) + { + var originalTags = GetTagResource().Get(cancellationToken); + originalTags.Value.Data.TagValues[key] = value; + GetTagResource().CreateOrUpdate(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken); + var originalResponse = _autoImportJobautoImportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + return Response.FromValue(new AutoImportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = Get(cancellationToken: cancellationToken).Value.Data; + var patch = new AutoImportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags[key] = value; + var result = Update(WaitUntil.Completed, patch, cancellationToken: cancellationToken); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Replace the tags on the resource with the given set. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The set of tags to use as replacement. + /// The cancellation token to use. + /// is null. + public virtual async Task> SetTagsAsync(IDictionary tags, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(tags, nameof(tags)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.SetTags"); + scope.Start(); + try + { + if (await CanUseTagResourceAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) + { + await GetTagResource().DeleteAsync(WaitUntil.Completed, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalTags = await GetTagResource().GetAsync(cancellationToken).ConfigureAwait(false); + originalTags.Value.Data.TagValues.ReplaceWith(tags); + await GetTagResource().CreateOrUpdateAsync(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalResponse = await _autoImportJobautoImportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + return Response.FromValue(new AutoImportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = (await GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false)).Value.Data; + var patch = new AutoImportJobPatch(); + patch.Tags.ReplaceWith(tags); + var result = await UpdateAsync(WaitUntil.Completed, patch, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Replace the tags on the resource with the given set. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The set of tags to use as replacement. + /// The cancellation token to use. + /// is null. 
+ public virtual Response SetTags(IDictionary tags, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(tags, nameof(tags)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.SetTags"); + scope.Start(); + try + { + if (CanUseTagResource(cancellationToken: cancellationToken)) + { + GetTagResource().Delete(WaitUntil.Completed, cancellationToken: cancellationToken); + var originalTags = GetTagResource().Get(cancellationToken); + originalTags.Value.Data.TagValues.ReplaceWith(tags); + GetTagResource().CreateOrUpdate(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken); + var originalResponse = _autoImportJobautoImportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + return Response.FromValue(new AutoImportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = Get(cancellationToken: cancellationToken).Value.Data; + var patch = new AutoImportJobPatch(); + patch.Tags.ReplaceWith(tags); + var result = Update(WaitUntil.Completed, patch, cancellationToken: cancellationToken); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Removes a tag by key from the resource. + /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. + /// The cancellation token to use. + /// is null. + public virtual async Task> RemoveTagAsync(string key, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.RemoveTag"); + scope.Start(); + try + { + if (await CanUseTagResourceAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) + { + var originalTags = await GetTagResource().GetAsync(cancellationToken).ConfigureAwait(false); + originalTags.Value.Data.TagValues.Remove(key); + await GetTagResource().CreateOrUpdateAsync(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken).ConfigureAwait(false); + var originalResponse = await _autoImportJobautoImportJobsRestClient.GetAsync(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken).ConfigureAwait(false); + return Response.FromValue(new AutoImportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = (await GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false)).Value.Data; + var patch = new AutoImportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags.Remove(key); + var result = await UpdateAsync(WaitUntil.Completed, patch, cancellationToken: cancellationToken).ConfigureAwait(false); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Removes a tag by key from the resource. 
+ /// + /// + /// Request Path + /// /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageCache/amlFilesystems/{amlFilesystemName}/autoImportJobs/{autoImportJobName} + /// + /// + /// Operation Id + /// autoImportJobs_Get + /// + /// + /// Default Api Version + /// 2025-07-01 + /// + /// + /// Resource + /// + /// + /// + /// + /// The key for the tag. + /// The cancellation token to use. + /// is null. + public virtual Response RemoveTag(string key, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(key, nameof(key)); + + using var scope = _autoImportJobautoImportJobsClientDiagnostics.CreateScope("AutoImportJobResource.RemoveTag"); + scope.Start(); + try + { + if (CanUseTagResource(cancellationToken: cancellationToken)) + { + var originalTags = GetTagResource().Get(cancellationToken); + originalTags.Value.Data.TagValues.Remove(key); + GetTagResource().CreateOrUpdate(WaitUntil.Completed, originalTags.Value.Data, cancellationToken: cancellationToken); + var originalResponse = _autoImportJobautoImportJobsRestClient.Get(Id.SubscriptionId, Id.ResourceGroupName, Id.Parent.Name, Id.Name, cancellationToken); + return Response.FromValue(new AutoImportJobResource(Client, originalResponse.Value), originalResponse.GetRawResponse()); + } + else + { + var current = Get(cancellationToken: cancellationToken).Value.Data; + var patch = new AutoImportJobPatch(); + foreach (var tag in current.Tags) + { + patch.Tags.Add(tag); + } + patch.Tags.Remove(key); + var result = Update(WaitUntil.Completed, patch, cancellationToken: cancellationToken); + return Response.FromValue(result.Value, result.GetRawResponse()); + } + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheArmClient.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheArmClient.cs index c379762f607e..267f6f77c239 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheArmClient.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheArmClient.cs @@ -46,6 +46,18 @@ public virtual AmlFileSystemResource GetAmlFileSystemResource(ResourceIdentifier return new AmlFileSystemResource(Client, id); } + /// + /// Gets an object representing an along with the instance operations that can be performed on it but with no data. + /// You can use to create an from its components. + /// + /// The resource ID of the resource to get. + /// Returns a object. + public virtual AutoExportJobResource GetAutoExportJobResource(ResourceIdentifier id) + { + AutoExportJobResource.ValidateResourceId(id); + return new AutoExportJobResource(Client, id); + } + /// /// Gets an object representing a along with the instance operations that can be performed on it but with no data. /// You can use to create a from its components. @@ -58,6 +70,18 @@ public virtual StorageCacheImportJobResource GetStorageCacheImportJobResource(Re return new StorageCacheImportJobResource(Client, id); } + /// + /// Gets an object representing an along with the instance operations that can be performed on it but with no data. + /// You can use to create an from its components. + /// + /// The resource ID of the resource to get. + /// Returns a object. 
+ public virtual AutoImportJobResource GetAutoImportJobResource(ResourceIdentifier id) + { + AutoImportJobResource.ValidateResourceId(id); + return new AutoImportJobResource(Client, id); + } + /// /// Gets an object representing a along with the instance operations that can be performed on it but with no data. /// You can use to create a from its components. diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheResourceGroupResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheResourceGroupResource.cs index 88d6ff88d4b5..46fc51e98859 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheResourceGroupResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheResourceGroupResource.cs @@ -53,7 +53,7 @@ public virtual AmlFileSystemCollection GetAmlFileSystems() /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -84,7 +84,7 @@ public virtual async Task> GetAmlFileSystemAsync /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -122,7 +122,7 @@ public virtual StorageCacheCollection GetStorageCaches() /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -153,7 +153,7 @@ public virtual async Task> GetStorageCacheAsync(s /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheSubscriptionResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheSubscriptionResource.cs index 78f7303f9271..acbb349deafc 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheSubscriptionResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/MockableStorageCacheSubscriptionResource.cs @@ -75,7 +75,7 @@ private string GetApiVersionOrNull(ResourceType resourceType) /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -105,7 +105,7 @@ public virtual AsyncPageable GetAmlFileSystemsAsync(Cance /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -135,7 +135,7 @@ public virtual Pageable GetAmlFileSystems(CancellationTok /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -170,7 +170,7 @@ public virtual async Task CheckAmlFSSubnetsAsync(AmlFileSystemSubnetCo /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -205,7 +205,7 @@ public virtual Response CheckAmlFSSubnets(AmlFileSystemSubnetContent content = n /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -240,7 +240,7 @@ public virtual async Task> GetRequire /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -275,7 +275,7 @@ public virtual Response GetRequiredAmlFSSubnet /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -301,7 +301,7 @@ public virtual AsyncPageable GetStorageCacheSkusAsync(Cancellat /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -327,7 +327,7 @@ public virtual Pageable GetStorageCacheSkus(CancellationToken c /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ 
-353,7 +353,7 @@ public virtual AsyncPageable GetUsageModelsAsync(Cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -379,7 +379,7 @@ public virtual Pageable GetUsageModels(CancellationToken /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -406,7 +406,7 @@ public virtual AsyncPageable GetStorageCacheUsagesAsync(Azure /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -433,7 +433,7 @@ public virtual Pageable GetStorageCacheUsages(AzureLocation l /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -463,7 +463,7 @@ public virtual AsyncPageable GetStorageCachesAsync(Cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/StorageCacheExtensions.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/StorageCacheExtensions.cs index fa091808f346..7a6df7e09f61 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/StorageCacheExtensions.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Extensions/StorageCacheExtensions.cs @@ -52,6 +52,25 @@ public static AmlFileSystemResource GetAmlFileSystemResource(this ArmClient clie return GetMockableStorageCacheArmClient(client).GetAmlFileSystemResource(id); } + /// + /// Gets an object representing an along with the instance operations that can be performed on it but with no data. + /// You can use to create an from its components. + /// + /// Mocking + /// To mock this method, please mock instead. + /// + /// + /// The instance the method will execute against. + /// The resource ID of the resource to get. + /// is null. + /// Returns a object. + public static AutoExportJobResource GetAutoExportJobResource(this ArmClient client, ResourceIdentifier id) + { + Argument.AssertNotNull(client, nameof(client)); + + return GetMockableStorageCacheArmClient(client).GetAutoExportJobResource(id); + } + /// /// Gets an object representing a along with the instance operations that can be performed on it but with no data. /// You can use to create a from its components. @@ -71,6 +90,25 @@ public static StorageCacheImportJobResource GetStorageCacheImportJobResource(thi return GetMockableStorageCacheArmClient(client).GetStorageCacheImportJobResource(id); } + /// + /// Gets an object representing an along with the instance operations that can be performed on it but with no data. + /// You can use to create an from its components. + /// + /// Mocking + /// To mock this method, please mock instead. + /// + /// + /// The instance the method will execute against. + /// The resource ID of the resource to get. + /// is null. + /// Returns a object. + public static AutoImportJobResource GetAutoImportJobResource(this ArmClient client, ResourceIdentifier id) + { + Argument.AssertNotNull(client, nameof(client)); + + return GetMockableStorageCacheArmClient(client).GetAutoImportJobResource(id); + } + /// /// Gets an object representing a along with the instance operations that can be performed on it but with no data. /// You can use to create a from its components. 
@@ -139,7 +177,7 @@ public static AmlFileSystemCollection GetAmlFileSystems(this ResourceGroupResour /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -177,7 +215,7 @@ public static async Task> GetAmlFileSystemAsync( /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -232,7 +270,7 @@ public static StorageCacheCollection GetStorageCaches(this ResourceGroupResource /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -270,7 +308,7 @@ public static async Task> GetStorageCacheAsync(th /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -308,7 +346,7 @@ public static Response GetStorageCache(this ResourceGroupR /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -344,7 +382,7 @@ public static AsyncPageable GetAmlFileSystemsAsync(this S /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -380,7 +418,7 @@ public static Pageable GetAmlFileSystems(this Subscriptio /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -412,7 +450,7 @@ public static async Task CheckAmlFSSubnetsAsync(this SubscriptionResou /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -444,7 +482,7 @@ public static Response CheckAmlFSSubnets(this SubscriptionResource subscriptionR /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -476,7 +514,7 @@ public static async Task> GetRequired /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -508,7 +546,7 @@ public static Response GetRequiredAmlFSSubnets /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -540,7 +578,7 @@ public static AsyncPageable GetStorageCacheSkusAsync(this Subsc /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -572,7 +610,7 @@ public static Pageable GetStorageCacheSkus(this SubscriptionRes /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -604,7 +642,7 @@ public static AsyncPageable GetUsageModelsAsync(this Sub /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -636,7 +674,7 @@ public static Pageable GetUsageModels(this SubscriptionR /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -669,7 +707,7 @@ public static AsyncPageable GetStorageCacheUsagesAsync(this S /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// @@ -702,7 +740,7 @@ public static Pageable GetStorageCacheUsages(this Subscriptio /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -738,7 +776,7 @@ public static AsyncPageable GetStorageCachesAsync(this Sub /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/LongRunningOperation/AutoExportJobOperationSource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/LongRunningOperation/AutoExportJobOperationSource.cs new file mode 100644 index 000000000000..4d1578feefb1 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/LongRunningOperation/AutoExportJobOperationSource.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
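+// Usage sketch: an IOperationSource such as the class below materializes the final
+// long-running-operation payload into a resource instance; callers do not use it directly.
+// The commented snippet is a rough sketch of how the corresponding LRO might be driven,
+// assuming "fileSystem" is an AmlFileSystemResource obtained earlier, the job name is a
+// placeholder, and AutoExportJobData exposes a location-only constructor like its
+// auto import counterpart.
+//
+//   AutoExportJobCollection exportJobs = fileSystem.GetAutoExportJobs();
+//   var data = new AutoExportJobData(fileSystem.Data.Location);
+//
+//   // WaitUntil.Completed blocks until the service reports the job as provisioned.
+//   ArmOperation<AutoExportJobResource> operation =
+//       await exportJobs.CreateOrUpdateAsync(WaitUntil.Completed, "<autoExportJobName>", data);
+//   AutoExportJobResource exportJob = operation.Value;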
+ +// + +#nullable disable + +using System.ClientModel.Primitives; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; + +namespace Azure.ResourceManager.StorageCache +{ + internal class AutoExportJobOperationSource : IOperationSource + { + private readonly ArmClient _client; + + internal AutoExportJobOperationSource(ArmClient client) + { + _client = client; + } + + AutoExportJobResource IOperationSource.CreateResult(Response response, CancellationToken cancellationToken) + { + var data = ModelReaderWriter.Read(response.Content); + return new AutoExportJobResource(_client, data); + } + + async ValueTask IOperationSource.CreateResultAsync(Response response, CancellationToken cancellationToken) + { + var data = ModelReaderWriter.Read(response.Content); + return await Task.FromResult(new AutoExportJobResource(_client, data)).ConfigureAwait(false); + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/LongRunningOperation/AutoImportJobOperationSource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/LongRunningOperation/AutoImportJobOperationSource.cs new file mode 100644 index 000000000000..c74227eb9075 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/LongRunningOperation/AutoImportJobOperationSource.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.ClientModel.Primitives; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; + +namespace Azure.ResourceManager.StorageCache +{ + internal class AutoImportJobOperationSource : IOperationSource + { + private readonly ArmClient _client; + + internal AutoImportJobOperationSource(ArmClient client) + { + _client = client; + } + + AutoImportJobResource IOperationSource.CreateResult(Response response, CancellationToken cancellationToken) + { + var data = ModelReaderWriter.Read(response.Content); + return new AutoImportJobResource(_client, data); + } + + async ValueTask IOperationSource.CreateResultAsync(Response response, CancellationToken cancellationToken) + { + var data = ModelReaderWriter.Read(response.Content); + return await Task.FromResult(new AutoImportJobResource(_client, data)).ConfigureAwait(false); + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobAdminStatus.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobAdminStatus.cs new file mode 100644 index 000000000000..967e1526bb6e --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobAdminStatus.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// The administrative status of the auto export job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto export job. By default it is set to 'Enable'. + public readonly partial struct AutoExportJobAdminStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutoExportJobAdminStatus(string value) + { + _value = value ?? 
throw new ArgumentNullException(nameof(value)); + } + + private const string EnableValue = "Enable"; + private const string DisableValue = "Disable"; + + /// Enable. + public static AutoExportJobAdminStatus Enable { get; } = new AutoExportJobAdminStatus(EnableValue); + /// Disable. + public static AutoExportJobAdminStatus Disable { get; } = new AutoExportJobAdminStatus(DisableValue); + /// Determines if two values are the same. + public static bool operator ==(AutoExportJobAdminStatus left, AutoExportJobAdminStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutoExportJobAdminStatus left, AutoExportJobAdminStatus right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutoExportJobAdminStatus(string value) => new AutoExportJobAdminStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutoExportJobAdminStatus other && Equals(other); + /// + public bool Equals(AutoExportJobAdminStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobPatch.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobPatch.Serialization.cs new file mode 100644 index 000000000000..233b82b5c7c1 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobPatch.Serialization.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.ResourceManager.StorageCache.Models +{ + public partial class AutoExportJobPatch : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoExportJobPatch)} does not support writing '{format}' format."); + } + + if (Optional.IsCollectionDefined(Tags)) + { + writer.WritePropertyName("tags"u8); + writer.WriteStartObject(); + foreach (var item in Tags) + { + writer.WritePropertyName(item.Key); + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + writer.WritePropertyName("properties"u8); + writer.WriteStartObject(); + if (Optional.IsDefined(AdminStatus)) + { + writer.WritePropertyName("adminStatus"u8); + writer.WriteStringValue(AdminStatus.Value.ToString()); + } + writer.WriteEndObject(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutoExportJobPatch IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoExportJobPatch)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutoExportJobPatch(document.RootElement, options); + } + + internal static AutoExportJobPatch DeserializeAutoExportJobPatch(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary tags = default; + AutoExportJobAdminStatus? adminStatus = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tags"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetString()); + } + tags = dictionary; + continue; + } + if (property.NameEquals("properties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("adminStatus"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + adminStatus = new AutoExportJobAdminStatus(property0.Value.GetString()); + continue; + } + } + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutoExportJobPatch(tags ?? new ChangeTrackingDictionary(), adminStatus, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutoExportJobPatch)} does not support writing '{options.Format}' format."); + } + } + + AutoExportJobPatch IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutoExportJobPatch(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutoExportJobPatch)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobPatch.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobPatch.cs new file mode 100644 index 000000000000..4001bf7aeedf --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobPatch.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// An auto export job update instance. + public partial class AutoExportJobPatch + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public AutoExportJobPatch() + { + Tags = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Resource tags. + /// The administrative status of the auto export job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto export job. By default it is set to 'Enable'. + /// Keeps track of any properties unknown to the library. + internal AutoExportJobPatch(IDictionary tags, AutoExportJobAdminStatus? adminStatus, IDictionary serializedAdditionalRawData) + { + Tags = tags; + AdminStatus = adminStatus; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Resource tags. + public IDictionary Tags { get; } + /// The administrative status of the auto export job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto export job. By default it is set to 'Enable'. + public AutoExportJobAdminStatus? 
AdminStatus { get; set; } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobProvisioningStateType.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobProvisioningStateType.cs new file mode 100644 index 000000000000..13b5c5b9b33f --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobProvisioningStateType.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// ARM provisioning state. + public readonly partial struct AutoExportJobProvisioningStateType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutoExportJobProvisioningStateType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string SucceededValue = "Succeeded"; + private const string FailedValue = "Failed"; + private const string CreatingValue = "Creating"; + private const string DeletingValue = "Deleting"; + private const string UpdatingValue = "Updating"; + private const string CanceledValue = "Canceled"; + + /// Succeeded. + public static AutoExportJobProvisioningStateType Succeeded { get; } = new AutoExportJobProvisioningStateType(SucceededValue); + /// Failed. + public static AutoExportJobProvisioningStateType Failed { get; } = new AutoExportJobProvisioningStateType(FailedValue); + /// Creating. + public static AutoExportJobProvisioningStateType Creating { get; } = new AutoExportJobProvisioningStateType(CreatingValue); + /// Deleting. + public static AutoExportJobProvisioningStateType Deleting { get; } = new AutoExportJobProvisioningStateType(DeletingValue); + /// Updating. + public static AutoExportJobProvisioningStateType Updating { get; } = new AutoExportJobProvisioningStateType(UpdatingValue); + /// Canceled. + public static AutoExportJobProvisioningStateType Canceled { get; } = new AutoExportJobProvisioningStateType(CanceledValue); + /// Determines if two values are the same. + public static bool operator ==(AutoExportJobProvisioningStateType left, AutoExportJobProvisioningStateType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutoExportJobProvisioningStateType left, AutoExportJobProvisioningStateType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutoExportJobProvisioningStateType(string value) => new AutoExportJobProvisioningStateType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutoExportJobProvisioningStateType other && Equals(other); + /// + public bool Equals(AutoExportJobProvisioningStateType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobsListResult.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobsListResult.Serialization.cs new file mode 100644 index 000000000000..2074e7ab385d --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobsListResult.Serialization.cs @@ -0,0 +1,154 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.ResourceManager.StorageCache.Models +{ + internal partial class AutoExportJobsListResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoExportJobsListResult)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(NextLink)) + { + writer.WritePropertyName("nextLink"u8); + writer.WriteStringValue(NextLink); + } + if (Optional.IsCollectionDefined(Value)) + { + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Value) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutoExportJobsListResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoExportJobsListResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutoExportJobsListResult(document.RootElement, options); + } + + internal static AutoExportJobsListResult DeserializeAutoExportJobsListResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string nextLink = default; + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("nextLink"u8)) + { + nextLink = property.Value.GetString(); + continue; + } + if (property.NameEquals("value"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(AutoExportJobData.DeserializeAutoExportJobData(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutoExportJobsListResult(nextLink, value ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutoExportJobsListResult)} does not support writing '{options.Format}' format."); + } + } + + AutoExportJobsListResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutoExportJobsListResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutoExportJobsListResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobsListResult.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobsListResult.cs new file mode 100644 index 000000000000..7ef40ef7fb83 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportJobsListResult.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// Result of the request to list auto export jobs. It contains a list of auto export jobs and a URL link to get the next set of results. 
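Illustrative usage, not part of the generated diff: a sketch of the AutoExportJobPatch introduced above and the JSON shape its serializer produces. The tag value is a placeholder, and the update call that would send the patch is outside the lines shown here.

using System;
using System.ClientModel.Primitives;
using Azure.ResourceManager.StorageCache.Models;

var patch = new AutoExportJobPatch
{
    // Disabling stops the currently active auto export job; the default is Enable.
    AdminStatus = AutoExportJobAdminStatus.Disable
};
patch.Tags["team"] = "hpc"; // Tags is get-only, so entries are added in place.

// Per JsonModelWriteCore above, adminStatus is nested under "properties" while tags stay top-level:
// {"tags":{"team":"hpc"},"properties":{"adminStatus":"Disable"}}
BinaryData json = ModelReaderWriter.Write(patch);
Console.WriteLine(json.ToString());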
+ internal partial class AutoExportJobsListResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal AutoExportJobsListResult() + { + Value = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// URL to get the next set of auto export job list results, if there are any. + /// List of auto export jobs. + /// Keeps track of any properties unknown to the library. + internal AutoExportJobsListResult(string nextLink, IReadOnlyList value, IDictionary serializedAdditionalRawData) + { + NextLink = nextLink; + Value = value; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// URL to get the next set of auto export job list results, if there are any. + public string NextLink { get; } + /// List of auto export jobs. + public IReadOnlyList Value { get; } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportStatusType.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportStatusType.cs new file mode 100644 index 000000000000..7cdc9e349b50 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoExportStatusType.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// The operational state of auto export. InProgress indicates the export is running. Disabling indicates the user has requested to disable the export but the disabling is still in progress. Disabled indicates auto export has been disabled. DisableFailed indicates the disabling has failed. Failed means the export was unable to continue, due to a fatal error. + public readonly partial struct AutoExportStatusType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutoExportStatusType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string InProgressValue = "InProgress"; + private const string DisablingValue = "Disabling"; + private const string DisabledValue = "Disabled"; + private const string DisableFailedValue = "DisableFailed"; + private const string FailedValue = "Failed"; + + /// InProgress. + public static AutoExportStatusType InProgress { get; } = new AutoExportStatusType(InProgressValue); + /// Disabling. + public static AutoExportStatusType Disabling { get; } = new AutoExportStatusType(DisablingValue); + /// Disabled. + public static AutoExportStatusType Disabled { get; } = new AutoExportStatusType(DisabledValue); + /// DisableFailed. 
+ public static AutoExportStatusType DisableFailed { get; } = new AutoExportStatusType(DisableFailedValue); + /// Failed. + public static AutoExportStatusType Failed { get; } = new AutoExportStatusType(FailedValue); + /// Determines if two values are the same. + public static bool operator ==(AutoExportStatusType left, AutoExportStatusType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutoExportStatusType left, AutoExportStatusType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutoExportStatusType(string value) => new AutoExportStatusType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutoExportStatusType other && Equals(other); + /// + public bool Equals(AutoExportStatusType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPatch.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPatch.Serialization.cs new file mode 100644 index 000000000000..8f7ba5008246 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPatch.Serialization.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.ResourceManager.StorageCache.Models +{ + public partial class AutoImportJobPatch : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobPatch)} does not support writing '{format}' format."); + } + + if (Optional.IsCollectionDefined(Tags)) + { + writer.WritePropertyName("tags"u8); + writer.WriteStartObject(); + foreach (var item in Tags) + { + writer.WritePropertyName(item.Key); + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + writer.WritePropertyName("properties"u8); + writer.WriteStartObject(); + if (Optional.IsDefined(AdminStatus)) + { + writer.WritePropertyName("adminStatus"u8); + writer.WriteStringValue(AdminStatus.Value.ToString()); + } + writer.WriteEndObject(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutoImportJobPatch IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobPatch)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutoImportJobPatch(document.RootElement, options); + } + + internal static AutoImportJobPatch DeserializeAutoImportJobPatch(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary tags = default; + AutoImportJobUpdatePropertiesAdminStatus? adminStatus = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tags"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetString()); + } + tags = dictionary; + continue; + } + if (property.NameEquals("properties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("adminStatus"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + adminStatus = new AutoImportJobUpdatePropertiesAdminStatus(property0.Value.GetString()); + continue; + } + } + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutoImportJobPatch(tags ?? new ChangeTrackingDictionary(), adminStatus, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutoImportJobPatch)} does not support writing '{options.Format}' format."); + } + } + + AutoImportJobPatch IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutoImportJobPatch(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutoImportJobPatch)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPatch.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPatch.cs new file mode 100644 index 000000000000..258bcd4ea0a5 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPatch.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// An auto import job update instance. + public partial class AutoImportJobPatch + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public AutoImportJobPatch() + { + Tags = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Resource tags. + /// The administrative status of the auto import job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto import job. By default it is set to 'Enable'. + /// Keeps track of any properties unknown to the library. + internal AutoImportJobPatch(IDictionary tags, AutoImportJobUpdatePropertiesAdminStatus? adminStatus, IDictionary serializedAdditionalRawData) + { + Tags = tags; + AdminStatus = adminStatus; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Resource tags. + public IDictionary Tags { get; } + /// The administrative status of the auto import job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto import job. By default it is set to 'Enable'. + public AutoImportJobUpdatePropertiesAdminStatus? 
AdminStatus { get; set; } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesAdminStatus.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesAdminStatus.cs new file mode 100644 index 000000000000..6f0b4756b5f9 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesAdminStatus.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// The administrative status of the auto import job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto import job. By default it is set to 'Enable'. + public readonly partial struct AutoImportJobPropertiesAdminStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutoImportJobPropertiesAdminStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string EnableValue = "Enable"; + private const string DisableValue = "Disable"; + + /// Enable. + public static AutoImportJobPropertiesAdminStatus Enable { get; } = new AutoImportJobPropertiesAdminStatus(EnableValue); + /// Disable. + public static AutoImportJobPropertiesAdminStatus Disable { get; } = new AutoImportJobPropertiesAdminStatus(DisableValue); + /// Determines if two values are the same. + public static bool operator ==(AutoImportJobPropertiesAdminStatus left, AutoImportJobPropertiesAdminStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutoImportJobPropertiesAdminStatus left, AutoImportJobPropertiesAdminStatus right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutoImportJobPropertiesAdminStatus(string value) => new AutoImportJobPropertiesAdminStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutoImportJobPropertiesAdminStatus other && Equals(other); + /// + public bool Equals(AutoImportJobPropertiesAdminStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesProvisioningState.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesProvisioningState.cs new file mode 100644 index 000000000000..531f3f4898c5 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesProvisioningState.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// ARM provisioning state. 
+ public readonly partial struct AutoImportJobPropertiesProvisioningState : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutoImportJobPropertiesProvisioningState(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string SucceededValue = "Succeeded"; + private const string FailedValue = "Failed"; + private const string CreatingValue = "Creating"; + private const string DeletingValue = "Deleting"; + private const string UpdatingValue = "Updating"; + private const string CanceledValue = "Canceled"; + + /// Succeeded. + public static AutoImportJobPropertiesProvisioningState Succeeded { get; } = new AutoImportJobPropertiesProvisioningState(SucceededValue); + /// Failed. + public static AutoImportJobPropertiesProvisioningState Failed { get; } = new AutoImportJobPropertiesProvisioningState(FailedValue); + /// Creating. + public static AutoImportJobPropertiesProvisioningState Creating { get; } = new AutoImportJobPropertiesProvisioningState(CreatingValue); + /// Deleting. + public static AutoImportJobPropertiesProvisioningState Deleting { get; } = new AutoImportJobPropertiesProvisioningState(DeletingValue); + /// Updating. + public static AutoImportJobPropertiesProvisioningState Updating { get; } = new AutoImportJobPropertiesProvisioningState(UpdatingValue); + /// Canceled. + public static AutoImportJobPropertiesProvisioningState Canceled { get; } = new AutoImportJobPropertiesProvisioningState(CanceledValue); + /// Determines if two values are the same. + public static bool operator ==(AutoImportJobPropertiesProvisioningState left, AutoImportJobPropertiesProvisioningState right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutoImportJobPropertiesProvisioningState left, AutoImportJobPropertiesProvisioningState right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutoImportJobPropertiesProvisioningState(string value) => new AutoImportJobPropertiesProvisioningState(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutoImportJobPropertiesProvisioningState other && Equals(other); + /// + public bool Equals(AutoImportJobPropertiesProvisioningState other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesStatusBlobSyncEvents.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesStatusBlobSyncEvents.Serialization.cs new file mode 100644 index 000000000000..939d879f816b --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesStatusBlobSyncEvents.Serialization.cs @@ -0,0 +1,327 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
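Illustrative usage, not part of the generated diff: how the extensible-enum structs above behave, which matters when the service returns a provisioning state this library version does not know yet.

using System;
using Azure.ResourceManager.StorageCache.Models;

// Implicit conversion from string, with case-insensitive comparison (InvariantCultureIgnoreCase).
AutoImportJobPropertiesProvisioningState state = "succeeded";
Console.WriteLine(state == AutoImportJobPropertiesProvisioningState.Succeeded); // True

// Unknown values are preserved rather than rejected, so newer service states still round-trip.
AutoImportJobPropertiesProvisioningState unknown = "Archived"; // hypothetical future value
Console.WriteLine(unknown.ToString()); // Archived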
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.ResourceManager.StorageCache.Models +{ + public partial class AutoImportJobPropertiesStatusBlobSyncEvents : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobPropertiesStatusBlobSyncEvents)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(ImportedFiles)) + { + writer.WritePropertyName("importedFiles"u8); + writer.WriteNumberValue(ImportedFiles.Value); + } + if (options.Format != "W" && Optional.IsDefined(ImportedDirectories)) + { + writer.WritePropertyName("importedDirectories"u8); + writer.WriteNumberValue(ImportedDirectories.Value); + } + if (options.Format != "W" && Optional.IsDefined(ImportedSymlinks)) + { + writer.WritePropertyName("importedSymlinks"u8); + writer.WriteNumberValue(ImportedSymlinks.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingFiles)) + { + writer.WritePropertyName("preexistingFiles"u8); + writer.WriteNumberValue(PreexistingFiles.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingDirectories)) + { + writer.WritePropertyName("preexistingDirectories"u8); + writer.WriteNumberValue(PreexistingDirectories.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingSymlinks)) + { + writer.WritePropertyName("preexistingSymlinks"u8); + writer.WriteNumberValue(PreexistingSymlinks.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalBlobsImported)) + { + writer.WritePropertyName("totalBlobsImported"u8); + writer.WriteNumberValue(TotalBlobsImported.Value); + } + if (options.Format != "W" && Optional.IsDefined(RateOfBlobImport)) + { + writer.WritePropertyName("rateOfBlobImport"u8); + writer.WriteNumberValue(RateOfBlobImport.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalErrors)) + { + writer.WritePropertyName("totalErrors"u8); + writer.WriteNumberValue(TotalErrors.Value); + } + if (options.Format != "W" && Optional.IsDefined(TotalConflicts)) + { + writer.WritePropertyName("totalConflicts"u8); + writer.WriteNumberValue(TotalConflicts.Value); + } + if (options.Format != "W" && Optional.IsDefined(Deletions)) + { + writer.WritePropertyName("deletions"u8); + writer.WriteNumberValue(Deletions.Value); + } + if (options.Format != "W" && Optional.IsDefined(LastChangeFeedEventConsumedOn)) + { + writer.WritePropertyName("lastChangeFeedEventConsumedTime"u8); + writer.WriteStringValue(LastChangeFeedEventConsumedOn.Value, "O"); + } + if (options.Format != "W" && Optional.IsDefined(LastTimeFullySynchronized)) + { + writer.WritePropertyName("lastTimeFullySynchronized"u8); + writer.WriteStringValue(LastTimeFullySynchronized.Value, "O"); + } + if (options.Format != "W" && 
_serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutoImportJobPropertiesStatusBlobSyncEvents IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobPropertiesStatusBlobSyncEvents)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutoImportJobPropertiesStatusBlobSyncEvents(document.RootElement, options); + } + + internal static AutoImportJobPropertiesStatusBlobSyncEvents DeserializeAutoImportJobPropertiesStatusBlobSyncEvents(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + long? importedFiles = default; + long? importedDirectories = default; + long? importedSymlinks = default; + long? preexistingFiles = default; + long? preexistingDirectories = default; + long? preexistingSymlinks = default; + long? totalBlobsImported = default; + long? rateOfBlobImport = default; + long? totalErrors = default; + long? totalConflicts = default; + long? deletions = default; + DateTimeOffset? lastChangeFeedEventConsumedTime = default; + DateTimeOffset? lastTimeFullySynchronized = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("importedFiles"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedFiles = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("importedDirectories"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedDirectories = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("importedSymlinks"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedSymlinks = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("preexistingFiles"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingFiles = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("preexistingDirectories"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingDirectories = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("preexistingSymlinks"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingSymlinks = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("totalBlobsImported"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalBlobsImported = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("rateOfBlobImport"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rateOfBlobImport = property.Value.GetInt64(); + continue; + } + if 
(property.NameEquals("totalErrors"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalErrors = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("totalConflicts"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + totalConflicts = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("deletions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + deletions = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("lastChangeFeedEventConsumedTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastChangeFeedEventConsumedTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("lastTimeFullySynchronized"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastTimeFullySynchronized = property.Value.GetDateTimeOffset("O"); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutoImportJobPropertiesStatusBlobSyncEvents( + importedFiles, + importedDirectories, + importedSymlinks, + preexistingFiles, + preexistingDirectories, + preexistingSymlinks, + totalBlobsImported, + rateOfBlobImport, + totalErrors, + totalConflicts, + deletions, + lastChangeFeedEventConsumedTime, + lastTimeFullySynchronized, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutoImportJobPropertiesStatusBlobSyncEvents)} does not support writing '{options.Format}' format."); + } + } + + AutoImportJobPropertiesStatusBlobSyncEvents IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutoImportJobPropertiesStatusBlobSyncEvents(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutoImportJobPropertiesStatusBlobSyncEvents)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesStatusBlobSyncEvents.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesStatusBlobSyncEvents.cs new file mode 100644 index 000000000000..f87fa218d16a --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobPropertiesStatusBlobSyncEvents.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// The storage account blob change feed status of the auto import job. 
+ public partial class AutoImportJobPropertiesStatusBlobSyncEvents + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal AutoImportJobPropertiesStatusBlobSyncEvents() + { + } + + /// Initializes a new instance of . + /// Number of files imported during auto import. + /// Number of directories imported during auto import. + /// Number of symlinks imported during auto import. + /// Number of preexisting files during auto import. + /// Number of preexisting directories during auto import. + /// Number of preexisting symlinks during auto import. + /// Total number of blobs imported during auto import. + /// Rate of blob import per second during auto import. + /// Total errors encountered during auto import. + /// Total conflicts encountered during auto import. + /// Number of deletions during auto import. + /// Date and time of the last Change Feed event consumed. + /// Date and time when last fully synchronized. + /// Keeps track of any properties unknown to the library. + internal AutoImportJobPropertiesStatusBlobSyncEvents(long? importedFiles, long? importedDirectories, long? importedSymlinks, long? preexistingFiles, long? preexistingDirectories, long? preexistingSymlinks, long? totalBlobsImported, long? rateOfBlobImport, long? totalErrors, long? totalConflicts, long? deletions, DateTimeOffset? lastChangeFeedEventConsumedOn, DateTimeOffset? lastTimeFullySynchronized, IDictionary serializedAdditionalRawData) + { + ImportedFiles = importedFiles; + ImportedDirectories = importedDirectories; + ImportedSymlinks = importedSymlinks; + PreexistingFiles = preexistingFiles; + PreexistingDirectories = preexistingDirectories; + PreexistingSymlinks = preexistingSymlinks; + TotalBlobsImported = totalBlobsImported; + RateOfBlobImport = rateOfBlobImport; + TotalErrors = totalErrors; + TotalConflicts = totalConflicts; + Deletions = deletions; + LastChangeFeedEventConsumedOn = lastChangeFeedEventConsumedOn; + LastTimeFullySynchronized = lastTimeFullySynchronized; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Number of files imported during auto import. + public long? ImportedFiles { get; } + /// Number of directories imported during auto import. + public long? ImportedDirectories { get; } + /// Number of symlinks imported during auto import. + public long? ImportedSymlinks { get; } + /// Number of preexisting files during auto import. + public long? PreexistingFiles { get; } + /// Number of preexisting directories during auto import. + public long? PreexistingDirectories { get; } + /// Number of preexisting symlinks during auto import. + public long? PreexistingSymlinks { get; } + /// Total number of blobs imported during auto import. + public long? TotalBlobsImported { get; } + /// Rate of blob import per second during auto import. + public long? 
RateOfBlobImport { get; } + /// Total errors encountered during auto import. + public long? TotalErrors { get; } + /// Total conflicts encountered during auto import. + public long? TotalConflicts { get; } + /// Number of deletions during auto import. + public long? Deletions { get; } + /// Date and time of the last Change Feed event consumed. + public DateTimeOffset? LastChangeFeedEventConsumedOn { get; } + /// Date and time when last fully synchronized. + public DateTimeOffset? LastTimeFullySynchronized { get; } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobState.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobState.cs new file mode 100644 index 000000000000..0fc82f3c02d7 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobState.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// The state of the auto import operation. + public readonly partial struct AutoImportJobState : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutoImportJobState(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string InProgressValue = "InProgress"; + private const string FailedValue = "Failed"; + private const string DisablingValue = "Disabling"; + private const string DisabledValue = "Disabled"; + + /// InProgress. + public static AutoImportJobState InProgress { get; } = new AutoImportJobState(InProgressValue); + /// Failed. + public static AutoImportJobState Failed { get; } = new AutoImportJobState(FailedValue); + /// Disabling. + public static AutoImportJobState Disabling { get; } = new AutoImportJobState(DisablingValue); + /// Disabled. + public static AutoImportJobState Disabled { get; } = new AutoImportJobState(DisabledValue); + /// Determines if two values are the same. + public static bool operator ==(AutoImportJobState left, AutoImportJobState right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutoImportJobState left, AutoImportJobState right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutoImportJobState(string value) => new AutoImportJobState(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutoImportJobState other && Equals(other); + /// + public bool Equals(AutoImportJobState other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobUpdatePropertiesAdminStatus.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobUpdatePropertiesAdminStatus.cs new file mode 100644 index 000000000000..d30dc2209957 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobUpdatePropertiesAdminStatus.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// The administrative status of the auto import job. Possible values: 'Enable', 'Disable'. Passing in a value of 'Disable' will disable the current active auto import job. By default it is set to 'Enable'. + public readonly partial struct AutoImportJobUpdatePropertiesAdminStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutoImportJobUpdatePropertiesAdminStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string EnableValue = "Enable"; + private const string DisableValue = "Disable"; + + /// Enable. + public static AutoImportJobUpdatePropertiesAdminStatus Enable { get; } = new AutoImportJobUpdatePropertiesAdminStatus(EnableValue); + /// Disable. + public static AutoImportJobUpdatePropertiesAdminStatus Disable { get; } = new AutoImportJobUpdatePropertiesAdminStatus(DisableValue); + /// Determines if two values are the same. + public static bool operator ==(AutoImportJobUpdatePropertiesAdminStatus left, AutoImportJobUpdatePropertiesAdminStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutoImportJobUpdatePropertiesAdminStatus left, AutoImportJobUpdatePropertiesAdminStatus right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutoImportJobUpdatePropertiesAdminStatus(string value) => new AutoImportJobUpdatePropertiesAdminStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutoImportJobUpdatePropertiesAdminStatus other && Equals(other); + /// + public bool Equals(AutoImportJobUpdatePropertiesAdminStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobsListResult.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobsListResult.Serialization.cs new file mode 100644 index 000000000000..b8ac1ce4fdcc --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobsListResult.Serialization.cs @@ -0,0 +1,158 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
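Illustrative usage, not part of the generated diff: a small helper that interprets AutoImportJobState values. The wording mirrors the operational-state descriptions documented for auto export earlier and is an inference for import, not text from this change.

using Azure.ResourceManager.StorageCache.Models;

static string Describe(AutoImportJobState state)
{
    if (state == AutoImportJobState.InProgress) return "Auto import is running.";
    if (state == AutoImportJobState.Disabling) return "A disable request is still being processed.";
    if (state == AutoImportJobState.Disabled) return "Auto import has been disabled.";
    if (state == AutoImportJobState.Failed) return "Auto import stopped due to a fatal error.";
    return $"Unrecognized state '{state}' (possibly from a newer service version).";
}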
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.ResourceManager.StorageCache.Models +{ + internal partial class AutoImportJobsListResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobsListResult)} does not support writing '{format}' format."); + } + + if (Optional.IsCollectionDefined(Value)) + { + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Value) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(NextLink)) + { + writer.WritePropertyName("nextLink"u8); + writer.WriteStringValue(NextLink.AbsoluteUri); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutoImportJobsListResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutoImportJobsListResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutoImportJobsListResult(document.RootElement, options); + } + + internal static AutoImportJobsListResult DeserializeAutoImportJobsListResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + Uri nextLink = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(AutoImportJobData.DeserializeAutoImportJobData(item, options)); + } + value = array; + continue; + } + if (property.NameEquals("nextLink"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + nextLink = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutoImportJobsListResult(value ?? new ChangeTrackingList(), nextLink, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutoImportJobsListResult)} does not support writing '{options.Format}' format."); + } + } + + AutoImportJobsListResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutoImportJobsListResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutoImportJobsListResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobsListResult.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobsListResult.cs new file mode 100644 index 000000000000..d5852068cd17 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/AutoImportJobsListResult.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// Result of the request to list auto import jobs. 
It contains a list of auto import jobs and a URL link to get the next set of results. + internal partial class AutoImportJobsListResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal AutoImportJobsListResult() + { + Value = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// List of auto import jobs. + /// URL to get the next set of auto import job list results, if there are any. + /// Keeps track of any properties unknown to the library. + internal AutoImportJobsListResult(IReadOnlyList value, Uri nextLink, IDictionary serializedAdditionalRawData) + { + Value = value; + NextLink = nextLink; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// List of auto import jobs. + public IReadOnlyList Value { get; } + /// URL to get the next set of auto import job list results, if there are any. + public Uri NextLink { get; } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/ImportJobAdminStatus.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/ImportJobAdminStatus.cs new file mode 100644 index 000000000000..a4891c74395c --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/ImportJobAdminStatus.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.StorageCache.Models +{ + /// The administrative status of the import job. Possible values: 'Active', 'Cancel'. Passing in a value of 'Cancel' will cancel the current active import job. By default it is set to 'Active'. + public readonly partial struct ImportJobAdminStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public ImportJobAdminStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string ActiveValue = "Active"; + private const string CancelValue = "Cancel"; + + /// Active. + public static ImportJobAdminStatus Active { get; } = new ImportJobAdminStatus(ActiveValue); + /// Cancel. + public static ImportJobAdminStatus Cancel { get; } = new ImportJobAdminStatus(CancelValue); + /// Determines if two values are the same. + public static bool operator ==(ImportJobAdminStatus left, ImportJobAdminStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(ImportJobAdminStatus left, ImportJobAdminStatus right) => !left.Equals(right); + /// Converts a to a . 
+ public static implicit operator ImportJobAdminStatus(string value) => new ImportJobAdminStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ImportJobAdminStatus other && Equals(other); + /// + public bool Equals(ImportJobAdminStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/ImportStatusType.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/ImportStatusType.cs index e58f3625ead3..79c830dcb172 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/ImportStatusType.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/ImportStatusType.cs @@ -10,7 +10,7 @@ namespace Azure.ResourceManager.StorageCache.Models { - /// The state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. + /// The operational state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. public readonly partial struct ImportStatusType : IEquatable { private readonly string _value; diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.Serialization.cs index 9811b8908935..eeb0a7362ea1 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.Serialization.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.Serialization.cs @@ -45,6 +45,14 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit } writer.WriteEndObject(); } + writer.WritePropertyName("properties"u8); + writer.WriteStartObject(); + if (Optional.IsDefined(AdminStatus)) + { + writer.WritePropertyName("adminStatus"u8); + writer.WriteStringValue(AdminStatus.Value.ToString()); + } + writer.WriteEndObject(); if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -83,6 +91,7 @@ internal static StorageCacheImportJobPatch DeserializeStorageCacheImportJobPatch return null; } IDictionary tags = default; + ImportJobAdminStatus? 
adminStatus = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -101,13 +110,34 @@ internal static StorageCacheImportJobPatch DeserializeStorageCacheImportJobPatch tags = dictionary; continue; } + if (property.NameEquals("properties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("adminStatus"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + adminStatus = new ImportJobAdminStatus(property0.Value.GetString()); + continue; + } + } + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } } serializedAdditionalRawData = rawDataDictionary; - return new StorageCacheImportJobPatch(tags ?? new ChangeTrackingDictionary(), serializedAdditionalRawData); + return new StorageCacheImportJobPatch(tags ?? new ChangeTrackingDictionary(), adminStatus, serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.cs index bdc151ef46e5..31a63601a904 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/Models/StorageCacheImportJobPatch.cs @@ -53,14 +53,18 @@ public StorageCacheImportJobPatch() /// Initializes a new instance of . /// Resource tags. + /// The administrative status of the import job. Possible values: 'Active', 'Cancel'. Passing in a value of 'Cancel' will cancel the current active import job. /// Keeps track of any properties unknown to the library. - internal StorageCacheImportJobPatch(IDictionary tags, IDictionary serializedAdditionalRawData) + internal StorageCacheImportJobPatch(IDictionary tags, ImportJobAdminStatus? adminStatus, IDictionary serializedAdditionalRawData) { Tags = tags; + AdminStatus = adminStatus; _serializedAdditionalRawData = serializedAdditionalRawData; } /// Resource tags. public IDictionary Tags { get; } + /// The administrative status of the import job. Possible values: 'Active', 'Cancel'. Passing in a value of 'Cancel' will cancel the current active import job. + public ImportJobAdminStatus? AdminStatus { get; set; } } } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AmlFilesystemsRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AmlFilesystemsRestOperations.cs index 7f990c2d18fc..cadb28dca5ff 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AmlFilesystemsRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AmlFilesystemsRestOperations.cs @@ -32,7 +32,7 @@ public AmlFilesystemsRestOperations(HttpPipeline pipeline, string applicationId, { _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? "2024-03-01"; + _apiVersion = apiVersion ?? 
"2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AscUsagesRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AscUsagesRestOperations.cs index 32abb53861a7..4b1853351d32 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AscUsagesRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AscUsagesRestOperations.cs @@ -32,7 +32,7 @@ public AscUsagesRestOperations(HttpPipeline pipeline, string applicationId, Uri { _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? "2024-03-01"; + _apiVersion = apiVersion ?? "2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AutoExportJobsRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AutoExportJobsRestOperations.cs new file mode 100644 index 000000000000..b18745b14ad8 --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AutoExportJobsRestOperations.cs @@ -0,0 +1,613 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + internal partial class AutoExportJobsRestOperations + { + private readonly TelemetryDetails _userAgent; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// Initializes a new instance of AutoExportJobsRestOperations. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The application id to use for user agent. + /// server parameter. + /// Api Version. + /// or is null. + public AutoExportJobsRestOperations(HttpPipeline pipeline, string applicationId, Uri endpoint = null, string apiVersion = default) + { + _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); + _endpoint = endpoint ?? new Uri("https://management.azure.com"); + _apiVersion = apiVersion ?? 
"2025-07-01"; + _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); + } + + internal RequestUriBuilder CreateDeleteRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateDeleteRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Schedules an auto export job for deletion. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public async Task DeleteAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var message = CreateDeleteRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 202: + case 204: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + /// Schedules an auto export job for deletion. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public Response Delete(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var message = CreateDeleteRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 202: + case 204: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateGetRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateGetRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Returns an auto export job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public async Task> GetAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var message = CreateGetRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + { + AutoExportJobData value = default; + using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); + value = AutoExportJobData.DeserializeAutoExportJobData(document.RootElement); + return Response.FromValue(value, message.Response); + } + case 404: + return Response.FromValue((AutoExportJobData)null, message.Response); + default: + throw new RequestFailedException(message.Response); + } + } + + /// Returns an auto export job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public Response Get(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + + using var message = CreateGetRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + { + AutoExportJobData value = default; + using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); + value = AutoExportJobData.DeserializeAutoExportJobData(document.RootElement); + return Response.FromValue(value, message.Response); + } + case 404: + return Response.FromValue((AutoExportJobData)null, message.Response); + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateCreateOrUpdateRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobData data) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + 
uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateCreateOrUpdateRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobData data) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(data, ModelSerializationExtensions.WireOptions); + request.Content = content; + _userAgent.Apply(message); + return message; + } + + /// Create or update an auto export job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public async Task CreateOrUpdateAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var message = CreateCreateOrUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName, data); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + case 201: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + /// Create or update an auto export job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public Response CreateOrUpdate(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var message = CreateCreateOrUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName, data); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + case 201: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateUpdateRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobPatch patch) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateUpdateRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobPatch patch) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs/", false); + uri.AppendPath(autoExportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(patch, ModelSerializationExtensions.WireOptions); + request.Content = content; + _userAgent.Apply(message); + return message; + } + + /// Update an auto export job instance. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public async Task UpdateAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + Argument.AssertNotNull(patch, nameof(patch)); + + using var message = CreateUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName, patch); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + case 202: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + /// Update an auto export job instance. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto export job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto export job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public Response Update(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoExportJobName, AutoExportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoExportJobName, nameof(autoExportJobName)); + Argument.AssertNotNull(patch, nameof(patch)); + + using var message = CreateUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoExportJobName, patch); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + case 202: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateListByAmlFileSystemRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs", false); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateListByAmlFileSystemRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + 
uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoExportJobs", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Returns all the auto export jobs the user has access to under an AML File System. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , or is null. + /// , or is an empty string, and was expected to be non-empty. + public async Task> ListByAmlFileSystemAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemRequest(subscriptionId, resourceGroupName, amlFileSystemName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + { + AutoExportJobsListResult value = default; + using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); + value = AutoExportJobsListResult.DeserializeAutoExportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + + /// Returns all the auto export jobs the user has access to under an AML File System. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , or is null. + /// , or is an empty string, and was expected to be non-empty. 
+ public Response ListByAmlFileSystem(string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemRequest(subscriptionId, resourceGroupName, amlFileSystemName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + { + AutoExportJobsListResult value = default; + using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); + value = AutoExportJobsListResult.DeserializeAutoExportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateListByAmlFileSystemNextPageRequestUri(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRawNextLink(nextLink, false); + return uri; + } + + internal HttpMessage CreateListByAmlFileSystemNextPageRequest(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRawNextLink(nextLink, false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Returns all the auto export jobs the user has access to under an AML File System. + /// The URL to the next page of results. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , or is an empty string, and was expected to be non-empty. 
+ public async Task> ListByAmlFileSystemNextPageAsync(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(nextLink, nameof(nextLink)); + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemNextPageRequest(nextLink, subscriptionId, resourceGroupName, amlFileSystemName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + { + AutoExportJobsListResult value = default; + using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); + value = AutoExportJobsListResult.DeserializeAutoExportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + + /// Returns all the auto export jobs the user has access to under an AML File System. + /// The URL to the next page of results. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , or is an empty string, and was expected to be non-empty. + public Response ListByAmlFileSystemNextPage(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(nextLink, nameof(nextLink)); + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemNextPageRequest(nextLink, subscriptionId, resourceGroupName, amlFileSystemName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + { + AutoExportJobsListResult value = default; + using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); + value = AutoExportJobsListResult.DeserializeAutoExportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AutoImportJobsRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AutoImportJobsRestOperations.cs new file mode 100644 index 000000000000..e3511d62c9cc --- /dev/null +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/AutoImportJobsRestOperations.cs @@ -0,0 +1,613 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.ResourceManager.StorageCache.Models; + +namespace Azure.ResourceManager.StorageCache +{ + internal partial class AutoImportJobsRestOperations + { + private readonly TelemetryDetails _userAgent; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// Initializes a new instance of AutoImportJobsRestOperations. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The application id to use for user agent. + /// server parameter. + /// Api Version. + /// or is null. + public AutoImportJobsRestOperations(HttpPipeline pipeline, string applicationId, Uri endpoint = null, string apiVersion = default) + { + _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); + _endpoint = endpoint ?? new Uri("https://management.azure.com"); + _apiVersion = apiVersion ?? "2025-07-01"; + _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); + } + + internal RequestUriBuilder CreateDeleteRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateDeleteRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Schedules an auto import job for deletion. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public async Task DeleteAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var message = CreateDeleteRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 202: + case 204: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + /// Schedules an auto import job for deletion. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public Response Delete(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var message = CreateDeleteRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 202: + case 204: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateGetRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateGetRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + 
uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Returns an auto import job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public async Task> GetAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var message = CreateGetRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + { + AutoImportJobData value = default; + using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); + value = AutoImportJobData.DeserializeAutoImportJobData(document.RootElement); + return Response.FromValue(value, message.Response); + } + case 404: + return Response.FromValue((AutoImportJobData)null, message.Response); + default: + throw new RequestFailedException(message.Response); + } + } + + /// Returns an auto import job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public Response Get(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + + using var message = CreateGetRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + { + AutoImportJobData value = default; + using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); + value = AutoImportJobData.DeserializeAutoImportJobData(document.RootElement); + return Response.FromValue(value, message.Response); + } + case 404: + return Response.FromValue((AutoImportJobData)null, message.Response); + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateCreateOrUpdateRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobData data) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateCreateOrUpdateRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobData data) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(data, ModelSerializationExtensions.WireOptions); + request.Content = content; + _userAgent.Apply(message); + return message; + } + + /// Create or update an auto import job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto import job. 
If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public async Task CreateOrUpdateAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var message = CreateCreateOrUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName, data); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + case 201: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + /// Create or update an auto import job. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto import job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public Response CreateOrUpdate(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobData data, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + Argument.AssertNotNull(data, nameof(data)); + + using var message = CreateCreateOrUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName, data); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + case 201: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateUpdateRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobPatch patch) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateUpdateRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobPatch patch) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs/", false); + uri.AppendPath(autoImportJobName, true); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(patch, ModelSerializationExtensions.WireOptions); + request.Content = content; + _userAgent.Apply(message); + return message; + } + + /// Update an auto import job instance. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto import job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. 
+ public async Task UpdateAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + Argument.AssertNotNull(patch, nameof(patch)); + + using var message = CreateUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName, patch); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + case 202: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + /// Update an auto import job instance. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Name for the auto import job. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// Object containing the user-selectable properties of the auto import job. If read-only properties are included, they must match the existing values of those properties. + /// The cancellation token to use. + /// , , , or is null. + /// , , or is an empty string, and was expected to be non-empty. + public Response Update(string subscriptionId, string resourceGroupName, string amlFileSystemName, string autoImportJobName, AutoImportJobPatch patch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + Argument.AssertNotNullOrEmpty(autoImportJobName, nameof(autoImportJobName)); + Argument.AssertNotNull(patch, nameof(patch)); + + using var message = CreateUpdateRequest(subscriptionId, resourceGroupName, amlFileSystemName, autoImportJobName, patch); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + case 202: + return message.Response; + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateListByAmlFileSystemRequestUri(string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs", false); + uri.AppendQuery("api-version", _apiVersion, true); + return uri; + } + + internal HttpMessage CreateListByAmlFileSystemRequest(string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/subscriptions/", false); + uri.AppendPath(subscriptionId, true); + 
uri.AppendPath("/resourceGroups/", false); + uri.AppendPath(resourceGroupName, true); + uri.AppendPath("/providers/Microsoft.StorageCache/amlFilesystems/", false); + uri.AppendPath(amlFileSystemName, true); + uri.AppendPath("/autoImportJobs", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Returns all the auto import jobs the user has access to under an AML File System. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , or is null. + /// , or is an empty string, and was expected to be non-empty. + public async Task> ListByAmlFileSystemAsync(string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemRequest(subscriptionId, resourceGroupName, amlFileSystemName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + { + AutoImportJobsListResult value = default; + using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); + value = AutoImportJobsListResult.DeserializeAutoImportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + + /// Returns all the auto import jobs the user has access to under an AML File System. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , or is null. + /// , or is an empty string, and was expected to be non-empty. 
+ public Response ListByAmlFileSystem(string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemRequest(subscriptionId, resourceGroupName, amlFileSystemName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + { + AutoImportJobsListResult value = default; + using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); + value = AutoImportJobsListResult.DeserializeAutoImportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + + internal RequestUriBuilder CreateListByAmlFileSystemNextPageRequestUri(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRawNextLink(nextLink, false); + return uri; + } + + internal HttpMessage CreateListByAmlFileSystemNextPageRequest(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName) + { + var message = _pipeline.CreateMessage(); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRawNextLink(nextLink, false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + _userAgent.Apply(message); + return message; + } + + /// Returns all the auto import jobs the user has access to under an AML File System. + /// The URL to the next page of results. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , or is an empty string, and was expected to be non-empty. 
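The next-page helpers that follow simply re-issue a GET against the service-provided nextLink. Below is a hedged sketch of that follow-the-link loop written against raw HttpClient, assuming the conventional ARM list envelope ("value" plus "nextLink") that AutoImportJobsListResult deserializes; authentication and error handling are omitted, and the "name" property is only the standard ARM resource shape, not something shown in this diff.

// Sketch, not the SDK implementation: page through auto import jobs by following nextLink.
using System;
using System.Net.Http;
using System.Text.Json;
using System.Threading.Tasks;

static async Task ListAllAutoImportJobsAsync(HttpClient http, string firstPageUri)
{
    string? next = firstPageUri;
    while (next is not null)
    {
        using HttpResponseMessage response = await http.GetAsync(next);
        using JsonDocument doc = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        foreach (JsonElement job in doc.RootElement.GetProperty("value").EnumerateArray())
        {
            Console.WriteLine(job.GetProperty("name").GetString()); // assumes standard ARM resource shape
        }
        next = doc.RootElement.TryGetProperty("nextLink", out JsonElement link) ? link.GetString() : null;
    }
}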
+ public async Task> ListByAmlFileSystemNextPageAsync(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(nextLink, nameof(nextLink)); + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemNextPageRequest(nextLink, subscriptionId, resourceGroupName, amlFileSystemName); + await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); + switch (message.Response.Status) + { + case 200: + { + AutoImportJobsListResult value = default; + using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); + value = AutoImportJobsListResult.DeserializeAutoImportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + + /// Returns all the auto import jobs the user has access to under an AML File System. + /// The URL to the next page of results. + /// The ID of the target subscription. + /// The name of the resource group. The name is case insensitive. + /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. + /// The cancellation token to use. + /// , , or is null. + /// , or is an empty string, and was expected to be non-empty. + public Response ListByAmlFileSystemNextPage(string nextLink, string subscriptionId, string resourceGroupName, string amlFileSystemName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(nextLink, nameof(nextLink)); + Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId)); + Argument.AssertNotNullOrEmpty(resourceGroupName, nameof(resourceGroupName)); + Argument.AssertNotNullOrEmpty(amlFileSystemName, nameof(amlFileSystemName)); + + using var message = CreateListByAmlFileSystemNextPageRequest(nextLink, subscriptionId, resourceGroupName, amlFileSystemName); + _pipeline.Send(message, cancellationToken); + switch (message.Response.Status) + { + case 200: + { + AutoImportJobsListResult value = default; + using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); + value = AutoImportJobsListResult.DeserializeAutoImportJobsListResult(document.RootElement); + return Response.FromValue(value, message.Response); + } + default: + throw new RequestFailedException(message.Response); + } + } + } +} diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/CachesRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/CachesRestOperations.cs index f82ea970d3c9..0b77ef64851f 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/CachesRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/CachesRestOperations.cs @@ -33,7 +33,7 @@ public CachesRestOperations(HttpPipeline pipeline, string applicationId, Uri end { _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? 
"2024-03-01"; + _apiVersion = apiVersion ?? "2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/ImportJobsRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/ImportJobsRestOperations.cs index 4d99dac134ee..649dff621433 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/ImportJobsRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/ImportJobsRestOperations.cs @@ -32,7 +32,7 @@ public ImportJobsRestOperations(HttpPipeline pipeline, string applicationId, Uri { _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? "2024-03-01"; + _apiVersion = apiVersion ?? "2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } @@ -274,7 +274,7 @@ internal HttpMessage CreateCreateOrUpdateRequest(string subscriptionId, string r return message; } - /// Create or update an import job. Import jobs are automatically deleted 72 hours after completion. + /// Create or update an import job. /// The ID of the target subscription. /// The name of the resource group. The name is case insensitive. /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. @@ -303,7 +303,7 @@ public async Task CreateOrUpdateAsync(string subscriptionId, string re } } - /// Create or update an import job. Import jobs are automatically deleted 72 hours after completion. + /// Create or update an import job. /// The ID of the target subscription. /// The name of the resource group. The name is case insensitive. /// Name for the AML file system. Allows alphanumerics, underscores, and hyphens. Start and end with alphanumeric. diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/SkusRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/SkusRestOperations.cs index 31e80406b0ac..bad9e3637088 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/SkusRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/SkusRestOperations.cs @@ -32,7 +32,7 @@ public SkusRestOperations(HttpPipeline pipeline, string applicationId, Uri endpo { _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? "2024-03-01"; + _apiVersion = apiVersion ?? "2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageCacheManagementRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageCacheManagementRestOperations.cs index ec1027ff445f..2aa1203ef9fd 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageCacheManagementRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageCacheManagementRestOperations.cs @@ -32,7 +32,7 @@ public StorageCacheManagementRestOperations(HttpPipeline pipeline, string applic { _pipeline = pipeline ?? 
throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? "2024-03-01"; + _apiVersion = apiVersion ?? "2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageTargetsRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageTargetsRestOperations.cs index 6e445f449f85..3d53fb81c938 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageTargetsRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/StorageTargetsRestOperations.cs @@ -32,7 +32,7 @@ public StorageTargetsRestOperations(HttpPipeline pipeline, string applicationId, { _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? "2024-03-01"; + _apiVersion = apiVersion ?? "2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/UsageModelsRestOperations.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/UsageModelsRestOperations.cs index d6ea189ad456..015ce1b9ddca 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/UsageModelsRestOperations.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/RestOperations/UsageModelsRestOperations.cs @@ -32,7 +32,7 @@ public UsageModelsRestOperations(HttpPipeline pipeline, string applicationId, Ur { _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); _endpoint = endpoint ?? new Uri("https://management.azure.com"); - _apiVersion = apiVersion ?? "2024-03-01"; + _apiVersion = apiVersion ?? 
"2025-07-01"; _userAgent = new TelemetryDetails(GetType().Assembly, applicationId); } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheCollection.cs index 74fc2f546457..3b869aa5416d 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheCollection.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheCollection.cs @@ -65,7 +65,7 @@ internal static void ValidateResourceId(ResourceIdentifier id) /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -114,7 +114,7 @@ public virtual async Task> CreateOrUpdateAsyn /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -163,7 +163,7 @@ public virtual ArmOperation CreateOrUpdate(WaitUntil waitU /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -208,7 +208,7 @@ public virtual async Task> GetAsync(string cacheN /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -253,7 +253,7 @@ public virtual Response Get(string cacheName, Cancellation /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -283,7 +283,7 @@ public virtual AsyncPageable GetAllAsync(CancellationToken /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -313,7 +313,7 @@ public virtual Pageable GetAll(CancellationToken cancellat /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -356,7 +356,7 @@ public virtual async Task> ExistsAsync(string cacheName, Cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -399,7 +399,7 @@ public virtual Response Exists(string cacheName, CancellationToken cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -444,7 +444,7 @@ public virtual async Task> GetIfExistsAsy /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobCollection.cs index 73c18efcfeaf..213b009e168d 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobCollection.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobCollection.cs @@ -52,7 +52,7 @@ internal static void ValidateResourceId(ResourceIdentifier id) } /// - /// Create or update an import job. Import jobs are automatically deleted 72 hours after completion. + /// Create or update an import job. /// /// /// Request Path @@ -64,7 +64,7 @@ internal static void ValidateResourceId(ResourceIdentifier id) /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -101,7 +101,7 @@ public virtual async Task> CreateOrU } /// - /// Create or update an import job. Import jobs are automatically deleted 72 hours after completion. + /// Create or update an import job. 
/// /// /// Request Path @@ -113,7 +113,7 @@ public virtual async Task> CreateOrU /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -162,7 +162,7 @@ public virtual ArmOperation CreateOrUpdate(WaitUn /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -207,7 +207,7 @@ public virtual async Task> GetAsync(stri /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -252,7 +252,7 @@ public virtual Response Get(string importJobName, /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -282,7 +282,7 @@ public virtual AsyncPageable GetAllAsync(Cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -312,7 +312,7 @@ public virtual Pageable GetAll(CancellationToken /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -355,7 +355,7 @@ public virtual async Task> ExistsAsync(string importJobName, Canc /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -398,7 +398,7 @@ public virtual Response Exists(string importJobName, CancellationToken can /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -443,7 +443,7 @@ public virtual async Task> GetIf /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.Serialization.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.Serialization.cs index c0cf5e270182..aa9d36b2a320 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.Serialization.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.Serialization.cs @@ -44,6 +44,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("provisioningState"u8); writer.WriteStringValue(ProvisioningState.Value.ToString()); } + if (Optional.IsDefined(AdminStatus)) + { + writer.WritePropertyName("adminStatus"u8); + writer.WriteStringValue(AdminStatus.Value.ToString()); + } if (Optional.IsCollectionDefined(ImportPrefixes)) { writer.WritePropertyName("importPrefixes"u8); @@ -91,6 +96,36 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("totalBlobsImported"u8); writer.WriteNumberValue(TotalBlobsImported.Value); } + if (options.Format != "W" && Optional.IsDefined(ImportedFiles)) + { + writer.WritePropertyName("importedFiles"u8); + writer.WriteNumberValue(ImportedFiles.Value); + } + if (options.Format != "W" && Optional.IsDefined(ImportedDirectories)) + { + writer.WritePropertyName("importedDirectories"u8); + writer.WriteNumberValue(ImportedDirectories.Value); + } + if (options.Format != "W" && Optional.IsDefined(ImportedSymlinks)) + { + writer.WritePropertyName("importedSymlinks"u8); + writer.WriteNumberValue(ImportedSymlinks.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingFiles)) + { + writer.WritePropertyName("preexistingFiles"u8); + writer.WriteNumberValue(PreexistingFiles.Value); + } + if (options.Format != "W" && Optional.IsDefined(PreexistingDirectories)) + { + writer.WritePropertyName("preexistingDirectories"u8); + writer.WriteNumberValue(PreexistingDirectories.Value); + } + if (options.Format != "W" && 
Optional.IsDefined(PreexistingSymlinks)) + { + writer.WritePropertyName("preexistingSymlinks"u8); + writer.WriteNumberValue(PreexistingSymlinks.Value); + } if (options.Format != "W" && Optional.IsDefined(BlobsImportedPerSecond)) { writer.WritePropertyName("blobsImportedPerSecond"u8); @@ -147,6 +182,7 @@ internal static StorageCacheImportJobData DeserializeStorageCacheImportJobData(J ResourceType type = default; SystemData systemData = default; ImportJobProvisioningStateType? provisioningState = default; + ImportJobAdminStatus? adminStatus = default; IList importPrefixes = default; ConflictResolutionMode? conflictResolutionMode = default; int? maximumErrors = default; @@ -155,6 +191,12 @@ internal static StorageCacheImportJobData DeserializeStorageCacheImportJobData(J long? totalBlobsWalked = default; long? blobsWalkedPerSecond = default; long? totalBlobsImported = default; + long? importedFiles = default; + long? importedDirectories = default; + long? importedSymlinks = default; + long? preexistingFiles = default; + long? preexistingDirectories = default; + long? preexistingSymlinks = default; long? blobsImportedPerSecond = default; DateTimeOffset? lastCompletionTime = default; DateTimeOffset? lastStartedTime = default; @@ -225,6 +267,15 @@ internal static StorageCacheImportJobData DeserializeStorageCacheImportJobData(J provisioningState = new ImportJobProvisioningStateType(property0.Value.GetString()); continue; } + if (property0.NameEquals("adminStatus"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + adminStatus = new ImportJobAdminStatus(property0.Value.GetString()); + continue; + } if (property0.NameEquals("importPrefixes"u8)) { if (property0.Value.ValueKind == JsonValueKind.Null) @@ -307,6 +358,60 @@ internal static StorageCacheImportJobData DeserializeStorageCacheImportJobData(J totalBlobsImported = property1.Value.GetInt64(); continue; } + if (property1.NameEquals("importedFiles"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedFiles = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("importedDirectories"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedDirectories = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("importedSymlinks"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importedSymlinks = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("preexistingFiles"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingFiles = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("preexistingDirectories"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingDirectories = property1.Value.GetInt64(); + continue; + } + if (property1.NameEquals("preexistingSymlinks"u8)) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preexistingSymlinks = property1.Value.GetInt64(); + continue; + } if (property1.NameEquals("blobsImportedPerSecond"u8)) { if (property1.Value.ValueKind == JsonValueKind.Null) @@ -372,6 +477,7 @@ internal static StorageCacheImportJobData DeserializeStorageCacheImportJobData(J tags ?? new ChangeTrackingDictionary(), location, provisioningState, + adminStatus, importPrefixes ?? 
new ChangeTrackingList(), conflictResolutionMode, maximumErrors, @@ -380,6 +486,12 @@ internal static StorageCacheImportJobData DeserializeStorageCacheImportJobData(J totalBlobsWalked, blobsWalkedPerSecond, totalBlobsImported, + importedFiles, + importedDirectories, + importedSymlinks, + preexistingFiles, + preexistingDirectories, + preexistingSymlinks, blobsImportedPerSecond, lastCompletionTime, lastStartedTime, diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.cs index a8303f8cf3de..3bbb2c173a5e 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobData.cs @@ -66,23 +66,31 @@ public StorageCacheImportJobData(AzureLocation location) : base(location) /// The tags. /// The location. /// ARM provisioning state. + /// The administrative status of the import job. Possible values: 'Active', 'Cancel'. Passing in a value of 'Cancel' will cancel the current active import job. By default it is set to 'Active'. /// An array of blob paths/prefixes that get imported into the cluster namespace. It has '/' as the default value. /// How the import job will handle conflicts. For example, if the import job is trying to bring in a directory, but a file is at that path, how it handles it. Fail indicates that the import job should stop immediately and not do anything with the conflict. Skip indicates that it should pass over the conflict. OverwriteIfDirty causes the import job to delete and re-import the file or directory if it is a conflicting type, is dirty, or was not previously imported. OverwriteAlways extends OverwriteIfDirty to include releasing files that had been restored but were not dirty. Please reference https://learn.microsoft.com/en-us/azure/azure-managed-lustre/ for a thorough explanation of these resolution modes. /// Total non-conflict oriented errors the import job will tolerate before exiting with failure. -1 means infinite. 0 means exit immediately and is the default. - /// The state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. + /// The operational state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. /// The status message of the import job. /// The total blob objects walked. /// A recent and frequently updated rate of blobs walked per second. /// The total blobs that have been imported since import began. + /// New or modified files that have been imported into the filesystem. + /// New or modified directories that have been imported into the filesystem. 
+ /// Newly added symbolic links into the filesystem. + /// Files that already exist in the filesystem and have not been modified. + /// Directories that already exist in the filesystem and have not been modified. + /// Symbolic links that already exist in the filesystem and have not been modified. /// A recent and frequently updated rate of total files, directories, and symlinks imported per second. - /// The time of the last completed archive operation. - /// The time the latest archive operation started. + /// The time (in UTC) of the last completed import job. + /// The time (in UTC) the latest import job started. /// Number of errors in the import job. /// Number of conflicts in the import job. /// Keeps track of any properties unknown to the library. - internal StorageCacheImportJobData(ResourceIdentifier id, string name, ResourceType resourceType, SystemData systemData, IDictionary tags, AzureLocation location, ImportJobProvisioningStateType? provisioningState, IList importPrefixes, ConflictResolutionMode? conflictResolutionMode, int? maximumErrors, ImportStatusType? state, string statusMessage, long? totalBlobsWalked, long? blobsWalkedPerSecond, long? totalBlobsImported, long? blobsImportedPerSecond, DateTimeOffset? lastCompletionOn, DateTimeOffset? lastStartedOn, int? totalErrors, int? totalConflicts, IDictionary serializedAdditionalRawData) : base(id, name, resourceType, systemData, tags, location) + internal StorageCacheImportJobData(ResourceIdentifier id, string name, ResourceType resourceType, SystemData systemData, IDictionary tags, AzureLocation location, ImportJobProvisioningStateType? provisioningState, ImportJobAdminStatus? adminStatus, IList importPrefixes, ConflictResolutionMode? conflictResolutionMode, int? maximumErrors, ImportStatusType? state, string statusMessage, long? totalBlobsWalked, long? blobsWalkedPerSecond, long? totalBlobsImported, long? importedFiles, long? importedDirectories, long? importedSymlinks, long? preexistingFiles, long? preexistingDirectories, long? preexistingSymlinks, long? blobsImportedPerSecond, DateTimeOffset? lastCompletionOn, DateTimeOffset? lastStartedOn, int? totalErrors, int? totalConflicts, IDictionary serializedAdditionalRawData) : base(id, name, resourceType, systemData, tags, location) { ProvisioningState = provisioningState; + AdminStatus = adminStatus; ImportPrefixes = importPrefixes; ConflictResolutionMode = conflictResolutionMode; MaximumErrors = maximumErrors; @@ -91,6 +99,12 @@ internal StorageCacheImportJobData(ResourceIdentifier id, string name, ResourceT TotalBlobsWalked = totalBlobsWalked; BlobsWalkedPerSecond = blobsWalkedPerSecond; TotalBlobsImported = totalBlobsImported; + ImportedFiles = importedFiles; + ImportedDirectories = importedDirectories; + ImportedSymlinks = importedSymlinks; + PreexistingFiles = preexistingFiles; + PreexistingDirectories = preexistingDirectories; + PreexistingSymlinks = preexistingSymlinks; BlobsImportedPerSecond = blobsImportedPerSecond; LastCompletionOn = lastCompletionOn; LastStartedOn = lastStartedOn; @@ -106,13 +120,15 @@ internal StorageCacheImportJobData() /// ARM provisioning state. public ImportJobProvisioningStateType? ProvisioningState { get; } + /// The administrative status of the import job. Possible values: 'Active', 'Cancel'. Passing in a value of 'Cancel' will cancel the current active import job. By default it is set to 'Active'. + public ImportJobAdminStatus? AdminStatus { get; set; } /// An array of blob paths/prefixes that get imported into the cluster namespace. 
It has '/' as the default value. public IList ImportPrefixes { get; } /// How the import job will handle conflicts. For example, if the import job is trying to bring in a directory, but a file is at that path, how it handles it. Fail indicates that the import job should stop immediately and not do anything with the conflict. Skip indicates that it should pass over the conflict. OverwriteIfDirty causes the import job to delete and re-import the file or directory if it is a conflicting type, is dirty, or was not previously imported. OverwriteAlways extends OverwriteIfDirty to include releasing files that had been restored but were not dirty. Please reference https://learn.microsoft.com/en-us/azure/azure-managed-lustre/ for a thorough explanation of these resolution modes. public ConflictResolutionMode? ConflictResolutionMode { get; set; } /// Total non-conflict oriented errors the import job will tolerate before exiting with failure. -1 means infinite. 0 means exit immediately and is the default. public int? MaximumErrors { get; set; } - /// The state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. + /// The operational state of the import job. InProgress indicates the import is still running. Canceled indicates it has been canceled by the user. Completed indicates import finished, successfully importing all discovered blobs into the Lustre namespace. CompletedPartial indicates the import finished but some blobs either were found to be conflicting and could not be imported or other errors were encountered. Failed means the import was unable to complete due to a fatal error. public ImportStatusType? State { get; } /// The status message of the import job. public string StatusMessage { get; } @@ -122,11 +138,23 @@ internal StorageCacheImportJobData() public long? BlobsWalkedPerSecond { get; } /// The total blobs that have been imported since import began. public long? TotalBlobsImported { get; } + /// New or modified files that have been imported into the filesystem. + public long? ImportedFiles { get; } + /// New or modified directories that have been imported into the filesystem. + public long? ImportedDirectories { get; } + /// Newly added symbolic links into the filesystem. + public long? ImportedSymlinks { get; } + /// Files that already exist in the filesystem and have not been modified. + public long? PreexistingFiles { get; } + /// Directories that already exist in the filesystem and have not been modified. + public long? PreexistingDirectories { get; } + /// Symbolic links that already exist in the filesystem and have not been modified. + public long? PreexistingSymlinks { get; } /// A recent and frequently updated rate of total files, directories, and symlinks imported per second. public long? BlobsImportedPerSecond { get; } - /// The time of the last completed archive operation. + /// The time (in UTC) of the last completed import job. public DateTimeOffset? LastCompletionOn { get; } - /// The time the latest archive operation started. + /// The time (in UTC) the latest import job started. public DateTimeOffset? LastStartedOn { get; } /// Number of errors in the import job. 
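Since AdminStatus is the only new writable member in this model, here is a short hedged sketch of how a caller might request cancellation of a running import job with it. The namespaces and the public string constructor on the extensible enum follow the usual generated-SDK layout and are assumptions, not shown in this diff; everything else mirrors the members above.

// Sketch: request cancellation of an active import job via the new AdminStatus property.
using Azure.Core;
using Azure.ResourceManager.StorageCache;
using Azure.ResourceManager.StorageCache.Models; // assumed namespace for ImportJobAdminStatus

var importJob = new StorageCacheImportJobData(new AzureLocation("eastus"))
{
    // 'Active' is the default; 'Cancel' asks the service to cancel the current active job.
    AdminStatus = new ImportJobAdminStatus("Cancel")
};
// The populated data would then be sent through the import job create/update path.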
public int? TotalErrors { get; } diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobResource.cs index 8f4abcb7ced0..74a40f4613d7 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheImportJobResource.cs @@ -103,7 +103,7 @@ internal static void ValidateResourceId(ResourceIdentifier id) /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -143,7 +143,7 @@ public virtual async Task> GetAsync(Canc /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -183,7 +183,7 @@ public virtual Response Get(CancellationToken can /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -225,7 +225,7 @@ public virtual async Task DeleteAsync(WaitUntil waitUntil, Cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -267,7 +267,7 @@ public virtual ArmOperation Delete(WaitUntil waitUntil, CancellationToken cancel /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -313,7 +313,7 @@ public virtual async Task> UpdateAsy /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -359,7 +359,7 @@ public virtual ArmOperation Update(WaitUntil wait /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -421,7 +421,7 @@ public virtual async Task> AddTagAsync(s /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -483,7 +483,7 @@ public virtual Response AddTag(string key, string /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -540,7 +540,7 @@ public virtual async Task> SetTagsAsync( /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -597,7 +597,7 @@ public virtual Response SetTags(IDictionary /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -657,7 +657,7 @@ public virtual async Task> RemoveTagAsyn /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheResource.cs index f30a3d9221c7..aaa463c26d5b 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageCacheResource.cs @@ -110,7 +110,7 @@ public virtual StorageTargetCollection GetStorageTargets() /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -141,7 +141,7 @@ public virtual async Task> GetStorageTargetAsync /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -172,7 +172,7 @@ public virtual Response GetStorageTarget(string storageTa /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -212,7 +212,7 @@ public virtual async Task> GetAsync(CancellationT /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -252,7 +252,7 @@ public virtual Response Get(CancellationToken cancellation /// /// /// Default 
Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -294,7 +294,7 @@ public virtual async Task DeleteAsync(WaitUntil waitUntil, Cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -336,7 +336,7 @@ public virtual ArmOperation Delete(WaitUntil waitUntil, CancellationToken cancel /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -382,7 +382,7 @@ public virtual async Task> UpdateAsync(WaitUn /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -428,7 +428,7 @@ public virtual ArmOperation Update(WaitUntil waitUntil, St /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -470,7 +470,7 @@ public virtual async Task EnableDebugInfoAsync(WaitUntil waitUntil /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -512,7 +512,7 @@ public virtual ArmOperation EnableDebugInfo(WaitUntil waitUntil, CancellationTok /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -554,7 +554,7 @@ public virtual async Task FlushAsync(WaitUntil waitUntil, Cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -596,7 +596,7 @@ public virtual ArmOperation Flush(WaitUntil waitUntil, CancellationToken cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -638,7 +638,7 @@ public virtual async Task StartAsync(WaitUntil waitUntil, Cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -680,7 +680,7 @@ public virtual ArmOperation Start(WaitUntil waitUntil, CancellationToken cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -722,7 +722,7 @@ public virtual async Task StopAsync(WaitUntil waitUntil, Cancellat /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -764,7 +764,7 @@ public virtual ArmOperation Stop(WaitUntil waitUntil, CancellationToken cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -807,7 +807,7 @@ public virtual async Task StartPrimingJobAsync(WaitUntil waitUntil /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -850,7 +850,7 @@ public virtual ArmOperation StartPrimingJob(WaitUntil waitUntil, PrimingJob prim /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -893,7 +893,7 @@ public virtual async Task StopPrimingJobAsync(WaitUntil waitUntil, /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -936,7 +936,7 @@ public virtual ArmOperation StopPrimingJob(WaitUntil waitUntil, PrimingJobConten /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -979,7 +979,7 @@ public virtual async Task PausePrimingJobAsync(WaitUntil waitUntil /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1022,7 +1022,7 @@ public virtual ArmOperation PausePrimingJob(WaitUntil waitUntil, PrimingJobConte /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1065,7 +1065,7 @@ public virtual async Task ResumePrimingJobAsync(WaitUntil waitUnti /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1108,7 +1108,7 @@ public virtual ArmOperation ResumePrimingJob(WaitUntil waitUntil, 
PrimingJobCont /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1150,7 +1150,7 @@ public virtual async Task UpgradeFirmwareAsync(WaitUntil waitUntil /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1192,7 +1192,7 @@ public virtual ArmOperation UpgradeFirmware(WaitUntil waitUntil, CancellationTok /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1235,7 +1235,7 @@ public virtual async Task UpdateSpaceAllocationAsync(WaitUntil wai /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1278,7 +1278,7 @@ public virtual ArmOperation UpdateSpaceAllocation(WaitUntil waitUntil, IEnumerab /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1340,7 +1340,7 @@ public virtual async Task> AddTagAsync(string key /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1402,7 +1402,7 @@ public virtual Response AddTag(string key, string value, C /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1459,7 +1459,7 @@ public virtual async Task> SetTagsAsync(IDictiona /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1516,7 +1516,7 @@ public virtual Response SetTags(IDictionary /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -1576,7 +1576,7 @@ public virtual async Task> RemoveTagAsync(string /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetCollection.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetCollection.cs index d65ff653dff6..b2875b6a8191 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetCollection.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetCollection.cs @@ -64,7 +64,7 @@ internal static void ValidateResourceId(ResourceIdentifier id) /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -113,7 +113,7 @@ public virtual async Task> CreateOrUpdateAsy /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -162,7 +162,7 @@ public virtual ArmOperation CreateOrUpdate(WaitUntil wait /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -207,7 +207,7 @@ public virtual async Task> GetAsync(string stora /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -252,7 +252,7 @@ public virtual Response Get(string storageTargetName, Can /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -282,7 +282,7 @@ public virtual AsyncPageable GetAllAsync(CancellationToke /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -312,7 +312,7 @@ public virtual Pageable GetAll(CancellationToken cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -355,7 +355,7 @@ public virtual async Task> ExistsAsync(string storageTargetName, /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -398,7 +398,7 @@ public virtual Response Exists(string storageTargetName, CancellationToken /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// 
Resource @@ -443,7 +443,7 @@ public virtual async Task> GetIfExistsAs /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetResource.cs b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetResource.cs index 61c51dbd5829..289347a44734 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetResource.cs +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/Generated/StorageTargetResource.cs @@ -101,7 +101,7 @@ internal static void ValidateResourceId(ResourceIdentifier id) /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -141,7 +141,7 @@ public virtual async Task> GetAsync(Cancellation /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -181,7 +181,7 @@ public virtual Response Get(CancellationToken cancellatio /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -224,7 +224,7 @@ public virtual async Task DeleteAsync(WaitUntil waitUntil, string /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -267,7 +267,7 @@ public virtual ArmOperation Delete(WaitUntil waitUntil, string force = null, Can /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -313,7 +313,7 @@ public virtual async Task> UpdateAsync(WaitU /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -359,7 +359,7 @@ public virtual ArmOperation Update(WaitUntil waitUntil, S /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -401,7 +401,7 @@ public virtual async Task RefreshDnsAsync(WaitUntil waitUntil, Can /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -443,7 +443,7 @@ public virtual ArmOperation RefreshDns(WaitUntil waitUntil, CancellationToken ca /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -485,7 +485,7 @@ public virtual async Task FlushAsync(WaitUntil waitUntil, Cancella /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -527,7 +527,7 @@ public virtual ArmOperation Flush(WaitUntil waitUntil, CancellationToken cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -569,7 +569,7 @@ public virtual async Task SuspendAsync(WaitUntil waitUntil, Cancel /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -611,7 +611,7 @@ public virtual ArmOperation Suspend(WaitUntil waitUntil, CancellationToken cance /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -653,7 +653,7 @@ public virtual async Task ResumeAsync(WaitUntil waitUntil, Cancell /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -695,7 +695,7 @@ public virtual ArmOperation Resume(WaitUntil waitUntil, CancellationToken cancel /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -737,7 +737,7 @@ public virtual async Task InvalidateAsync(WaitUntil waitUntil, Can /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource @@ -779,7 +779,7 @@ public virtual ArmOperation Invalidate(WaitUntil waitUntil, CancellationToken ca /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// 
/// Resource @@ -821,7 +821,7 @@ public virtual async Task RestoreDefaultsAsync(WaitUntil waitUntil /// /// /// Default Api Version - /// 2024-03-01 + /// 2025-07-01 /// /// /// Resource diff --git a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/autorest.md b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/autorest.md index 8c30cc7e2795..115342567c77 100644 --- a/sdk/storagecache/Azure.ResourceManager.StorageCache/src/autorest.md +++ b/sdk/storagecache/Azure.ResourceManager.StorageCache/src/autorest.md @@ -8,7 +8,7 @@ azure-arm: true csharp: true library-name: StorageCache namespace: Azure.ResourceManager.StorageCache -require: https://github.com/Azure/azure-rest-api-specs/blob/cb1185d9961b7dabe002fdb4c3a28c07d130e47e/specification/storagecache/resource-manager/readme.md +require: /mnt/vss/_work/1/s/azure-rest-api-specs/specification/storagecache/resource-manager/readme.md output-folder: $(this-folder)/Generated clear-output-folder: true sample-gen:
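To close, a small sketch of consuming the new read-only progress counters that this change adds to StorageCacheImportJobData, assuming a job instance has already been retrieved; the property names come straight from the model earlier in this diff.

// Sketch: report the new per-kind import counters on a retrieved import job.
using System;
using Azure.ResourceManager.StorageCache;

static void PrintImportCounters(StorageCacheImportJobData job)
{
    Console.WriteLine($"Imported:    {job.ImportedFiles} files, {job.ImportedDirectories} directories, {job.ImportedSymlinks} symlinks");
    Console.WriteLine($"Preexisting: {job.PreexistingFiles} files, {job.PreexistingDirectories} directories, {job.PreexistingSymlinks} symlinks");
}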