diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIFoundryModelCatalogName.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIFoundryModelCatalogName.cs
similarity index 87%
rename from sdk/search/Azure.Search.Documents/src/Generated/Models/AIFoundryModelCatalogName.cs
rename to sdk/search/Azure.Search.Documents/src/Generated/AIFoundryModelCatalogName.cs
index d65745f5b56d..780be23f1219 100644
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIFoundryModelCatalogName.cs
+++ b/sdk/search/Azure.Search.Documents/src/Generated/AIFoundryModelCatalogName.cs
@@ -8,9 +8,12 @@
 using System;
 using System.ComponentModel;
 
-namespace Azure.Search.Documents.Indexes.Models
+namespace Azure.Search.Documents
 {
-    /// <summary> The name of the embedding model from the Azure AI Studio Catalog that will be called. </summary>
+    /// <summary>
+    /// The name of the embedding model from the Azure AI Foundry Catalog that will be
+    /// called.
+    /// </summary>
     public readonly partial struct AIFoundryModelCatalogName : IEquatable<AIFoundryModelCatalogName>
     {
         private readonly string _value;
@@ -22,17 +25,17 @@ public AIFoundryModelCatalogName(string value)
             _value = value ?? throw new ArgumentNullException(nameof(value));
         }
 
-        private const string OpenAIClipImageTextEmbeddingsVitBasePatch32Value = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32";
-        private const string OpenAIClipImageTextEmbeddingsViTLargePatch14336Value = "OpenAI-CLIP-Image-Text-Embeddings-ViT-Large-Patch14-336";
+        private const string OpenAICLIPImageTextEmbeddingsVitBasePatch32Value = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32";
+        private const string OpenAICLIPImageTextEmbeddingsViTLargePatch14336Value = "OpenAI-CLIP-Image-Text-Embeddings-ViT-Large-Patch14-336";
         private const string FacebookDinoV2ImageEmbeddingsViTBaseValue = "Facebook-DinoV2-Image-Embeddings-ViT-Base";
         private const string FacebookDinoV2ImageEmbeddingsViTGiantValue = "Facebook-DinoV2-Image-Embeddings-ViT-Giant";
         private const string CohereEmbedV3EnglishValue = "Cohere-embed-v3-english";
         private const string CohereEmbedV3MultilingualValue = "Cohere-embed-v3-multilingual";
 
         /// <summary> OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32. </summary>
-        public static AIFoundryModelCatalogName OpenAIClipImageTextEmbeddingsVitBasePatch32 { get; } = new AIFoundryModelCatalogName(OpenAIClipImageTextEmbeddingsVitBasePatch32Value);
+        public static AIFoundryModelCatalogName OpenAICLIPImageTextEmbeddingsVitBasePatch32 { get; } = new AIFoundryModelCatalogName(OpenAICLIPImageTextEmbeddingsVitBasePatch32Value);
         /// <summary> OpenAI-CLIP-Image-Text-Embeddings-ViT-Large-Patch14-336. </summary>
-        public static AIFoundryModelCatalogName OpenAIClipImageTextEmbeddingsViTLargePatch14336 { get; } = new AIFoundryModelCatalogName(OpenAIClipImageTextEmbeddingsViTLargePatch14336Value);
+        public static AIFoundryModelCatalogName OpenAICLIPImageTextEmbeddingsViTLargePatch14336 { get; } = new AIFoundryModelCatalogName(OpenAICLIPImageTextEmbeddingsViTLargePatch14336Value);
         /// <summary> Facebook-DinoV2-Image-Embeddings-ViT-Base. </summary>
         public static AIFoundryModelCatalogName FacebookDinoV2ImageEmbeddingsViTBase { get; } = new AIFoundryModelCatalogName(FacebookDinoV2ImageEmbeddingsViTBaseValue);
         /// <summary> Facebook-DinoV2-Image-Embeddings-ViT-Giant. </summary>
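The rename above moves `AIFoundryModelCatalogName` out of the `Models` namespace and changes the `Clip` segment of the public property names to `CLIP`. A minimal before/after sketch of consuming code, using only names visible in this diff; the `ToString()` behaviour is assumed to follow the usual Azure SDK extensible-enum pattern of returning the underlying string value:

```csharp
using System;
using Azure.Search.Documents;                    // new namespace in this diff
// using Azure.Search.Documents.Indexes.Models;  // old namespace before the rename

class CatalogNameSample
{
    static void Main()
    {
        // Old property name (removed by this diff):
        //   AIFoundryModelCatalogName.OpenAIClipImageTextEmbeddingsVitBasePatch32
        // New property name:
        AIFoundryModelCatalogName model =
            AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32;

        // Assumed extensible-enum behaviour: ToString() returns the wire value.
        Console.WriteLine(model); // OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32
    }
}
```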
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountIdentity.Serialization.cs new file mode 100644 index 000000000000..af5e70a0f75e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountIdentity.Serialization.cs @@ -0,0 +1,155 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AIServicesAccountIdentity : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesAccountIdentity)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Identity)) + { + writer.WritePropertyName("identity"u8); + writer.WriteObjectValue(Identity, options); + } + writer.WritePropertyName("subdomainUrl"u8); + writer.WriteStringValue(SubdomainUrl); + } + + AIServicesAccountIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesAccountIdentity)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAIServicesAccountIdentity(document.RootElement, options); + } + + internal static AIServicesAccountIdentity DeserializeAIServicesAccountIdentity(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SearchIndexerDataIdentity identity = default; + string subdomainUrl = default; + string odataType = default; + string description = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("identity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (property.NameEquals("subdomainUrl"u8)) + { + subdomainUrl = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AIServicesAccountIdentity(odataType, description, serializedAdditionalRawData, identity, subdomainUrl); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AIServicesAccountIdentity)} does not support writing '{options.Format}' format."); + } + } + + AIServicesAccountIdentity IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesAccountIdentity(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AIServicesAccountIdentity)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AIServicesAccountIdentity FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesAccountIdentity(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountIdentity.cs new file mode 100644 index 000000000000..40e578211d6c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountIdentity.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The multi-region account of an Azure AI service resource that's attached to a + /// skillset. + /// + public partial class AIServicesAccountIdentity : CognitiveServicesAccount + { + /// Initializes a new instance of . + /// The subdomain url for the corresponding AI Service. + /// is null. + public AIServicesAccountIdentity(string subdomainUrl) + { + Argument.AssertNotNull(subdomainUrl, nameof(subdomainUrl)); + + OdataType = "#Microsoft.Azure.Search.AIServicesByIdentity"; + SubdomainUrl = subdomainUrl; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Description of the Azure AI service resource attached to a skillset. + /// Keeps track of any properties unknown to the library. + /// + /// The user-assigned managed identity used for connections to AI Service. If not + /// specified, the system-assigned managed identity is used. On updates to the + /// skillset, if the identity is unspecified, the value remains unchanged. If set + /// to "none", the value of this property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// The subdomain url for the corresponding AI Service. + internal AIServicesAccountIdentity(string odataType, string description, IDictionary serializedAdditionalRawData, SearchIndexerDataIdentity identity, string subdomainUrl) : base(odataType, description, serializedAdditionalRawData) + { + Identity = identity; + SubdomainUrl = subdomainUrl; + } + + /// Initializes a new instance of for deserialization. + internal AIServicesAccountIdentity() + { + } + + /// + /// The user-assigned managed identity used for connections to AI Service. If not + /// specified, the system-assigned managed identity is used. On updates to the + /// skillset, if the identity is unspecified, the value remains unchanged. If set + /// to "none", the value of this property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity Identity { get; set; } + /// The subdomain url for the corresponding AI Service. 
+ public string SubdomainUrl { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountKey.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountKey.Serialization.cs new file mode 100644 index 000000000000..aa300e235460 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountKey.Serialization.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AIServicesAccountKey : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesAccountKey)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("key"u8); + writer.WriteStringValue(Key); + writer.WritePropertyName("subdomainUrl"u8); + writer.WriteStringValue(SubdomainUrl); + } + + AIServicesAccountKey IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesAccountKey)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAIServicesAccountKey(document.RootElement, options); + } + + internal static AIServicesAccountKey DeserializeAIServicesAccountKey(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string key = default; + string subdomainUrl = default; + string odataType = default; + string description = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("subdomainUrl"u8)) + { + subdomainUrl = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AIServicesAccountKey(odataType, description, serializedAdditionalRawData, key, subdomainUrl); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AIServicesAccountKey)} does not support writing '{options.Format}' format."); + } + } + + AIServicesAccountKey IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesAccountKey(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AIServicesAccountKey)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AIServicesAccountKey FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesAccountKey(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountKey.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountKey.cs new file mode 100644 index 000000000000..5bc9e644ec58 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesAccountKey.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The account key of an Azure AI service resource that's attached to a skillset, + /// to be used with the resource's subdomain. + /// + public partial class AIServicesAccountKey : CognitiveServicesAccount + { + /// Initializes a new instance of . + /// The key used to provision the Azure AI service resource attached to a skillset. + /// The subdomain url for the corresponding AI Service. + /// or is null. + public AIServicesAccountKey(string key, string subdomainUrl) + { + Argument.AssertNotNull(key, nameof(key)); + Argument.AssertNotNull(subdomainUrl, nameof(subdomainUrl)); + + OdataType = "#Microsoft.Azure.Search.AIServicesByKey"; + Key = key; + SubdomainUrl = subdomainUrl; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Description of the Azure AI service resource attached to a skillset. + /// Keeps track of any properties unknown to the library. + /// The key used to provision the Azure AI service resource attached to a skillset. + /// The subdomain url for the corresponding AI Service. + internal AIServicesAccountKey(string odataType, string description, IDictionary serializedAdditionalRawData, string key, string subdomainUrl) : base(odataType, description, serializedAdditionalRawData) + { + Key = key; + SubdomainUrl = subdomainUrl; + } + + /// Initializes a new instance of for deserialization. + internal AIServicesAccountKey() + { + } + + /// The key used to provision the Azure AI service resource attached to a skillset. + public string Key { get; set; } + /// The subdomain url for the corresponding AI Service. + public string SubdomainUrl { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionParameters.Serialization.cs new file mode 100644 index 000000000000..40c51cffdc69 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionParameters.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AIServicesVisionParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesVisionParameters)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + writer.WritePropertyName("resourceUri"u8); + writer.WriteStringValue(ResourceUri.AbsoluteUri); + if (Optional.IsDefined(ApiKey)) + { + writer.WritePropertyName("apiKey"u8); + writer.WriteStringValue(ApiKey); + } + if (Optional.IsDefined(AuthIdentity)) + { + writer.WritePropertyName("authIdentity"u8); + writer.WriteObjectValue(AuthIdentity, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AIServicesVisionParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesVisionParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAIServicesVisionParameters(document.RootElement, options); + } + + internal static AIServicesVisionParameters DeserializeAIServicesVisionParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string modelVersion = default; + Uri resourceUri = default; + string apiKey = default; + SearchIndexerDataIdentity authIdentity = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("resourceUri"u8)) + { + resourceUri = new Uri(property.Value.GetString()); + continue; + } + if (property.NameEquals("apiKey"u8)) + { + apiKey = property.Value.GetString(); + continue; + } + if (property.NameEquals("authIdentity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AIServicesVisionParameters(modelVersion, resourceUri, apiKey, authIdentity, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AIServicesVisionParameters)} does not support writing '{options.Format}' format."); + } + } + + AIServicesVisionParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesVisionParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AIServicesVisionParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AIServicesVisionParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesVisionParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionParameters.cs new file mode 100644 index 000000000000..7b3a90507233 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionParameters.cs @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Specifies the AI Services Vision parameters for vectorizing a query image or + /// text. + /// + public partial class AIServicesVisionParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The version of the model to use when calling the AI Services Vision service. It + /// will default to the latest available when not specified. + /// + /// The resource URI of the AI Services resource. + /// or is null. 
+ public AIServicesVisionParameters(string modelVersion, Uri resourceUri) + { + Argument.AssertNotNull(modelVersion, nameof(modelVersion)); + Argument.AssertNotNull(resourceUri, nameof(resourceUri)); + + ModelVersion = modelVersion; + ResourceUri = resourceUri; + } + + /// Initializes a new instance of . + /// + /// The version of the model to use when calling the AI Services Vision service. It + /// will default to the latest available when not specified. + /// + /// The resource URI of the AI Services resource. + /// API key of the designated AI Services resource. + /// + /// The user-assigned managed identity used for outbound connections. If an + /// authResourceId is provided and it's not specified, the system-assigned managed + /// identity is used. On updates to the index, if the identity is unspecified, the + /// value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// Keeps track of any properties unknown to the library. + internal AIServicesVisionParameters(string modelVersion, Uri resourceUri, string apiKey, SearchIndexerDataIdentity authIdentity, IDictionary serializedAdditionalRawData) + { + ModelVersion = modelVersion; + ResourceUri = resourceUri; + ApiKey = apiKey; + AuthIdentity = authIdentity; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AIServicesVisionParameters() + { + } + + /// + /// The version of the model to use when calling the AI Services Vision service. It + /// will default to the latest available when not specified. + /// + public string ModelVersion { get; set; } + /// The resource URI of the AI Services resource. + public Uri ResourceUri { get; set; } + /// API key of the designated AI Services resource. + public string ApiKey { get; set; } + /// + /// The user-assigned managed identity used for outbound connections. If an + /// authResourceId is provided and it's not specified, the system-assigned managed + /// identity is used. On updates to the index, if the identity is unspecified, the + /// value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity AuthIdentity { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionVectorizer.Serialization.cs new file mode 100644 index 000000000000..de8d24b1cbb4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionVectorizer.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AIServicesVisionVectorizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesVisionVectorizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(AIServicesVisionParameters)) + { + writer.WritePropertyName("AIServicesVisionParameters"u8); + writer.WriteObjectValue(AIServicesVisionParameters, options); + } + } + + AIServicesVisionVectorizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AIServicesVisionVectorizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAIServicesVisionVectorizer(document.RootElement, options); + } + + internal static AIServicesVisionVectorizer DeserializeAIServicesVisionVectorizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + AIServicesVisionParameters aiServicesVisionParameters = default; + string name = default; + VectorSearchVectorizerKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("AIServicesVisionParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + aiServicesVisionParameters = AIServicesVisionParameters.DeserializeAIServicesVisionParameters(property.Value, options); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchVectorizerKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AIServicesVisionVectorizer(name, kind, serializedAdditionalRawData, aiServicesVisionParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AIServicesVisionVectorizer)} does not support writing '{options.Format}' format."); + } + } + + AIServicesVisionVectorizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesVisionVectorizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AIServicesVisionVectorizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AIServicesVisionVectorizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAIServicesVisionVectorizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionVectorizer.cs new file mode 100644 index 000000000000..c3cbcee9cd14 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AIServicesVisionVectorizer.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Clears the identity property of a datasource. + public partial class AIServicesVisionVectorizer : VectorSearchVectorizer + { + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// is null. + public AIServicesVisionVectorizer(string vectorizerName) : base(vectorizerName) + { + Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); + + Kind = VectorSearchVectorizerKind.AIServicesVision; + } + + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// Type of VectorSearchVectorizer. + /// Keeps track of any properties unknown to the library. + /// Contains the parameters specific to AI Services Vision embedding vectorization. + internal AIServicesVisionVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, IDictionary serializedAdditionalRawData, AIServicesVisionParameters aiServicesVisionParameters) : base(vectorizerName, kind, serializedAdditionalRawData) + { + AIServicesVisionParameters = aiServicesVisionParameters; + } + + /// Initializes a new instance of for deserialization. + internal AIServicesVisionVectorizer() + { + } + + /// Contains the parameters specific to AI Services Vision embedding vectorization. 
+ public AIServicesVisionParameters AIServicesVisionParameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Aliases.cs b/sdk/search/Azure.Search.Documents/src/Generated/Aliases.cs new file mode 100644 index 000000000000..628e13f42849 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Aliases.cs @@ -0,0 +1,656 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Autorest.CSharp.Core; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.Search.Documents +{ + // Data plane generated sub-client. + /// The Aliases sub-client. + public partial class Aliases + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of Aliases for mocking. + protected Aliases() + { + } + + /// Initializes a new instance of Aliases. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// Service host. + /// The API version to use for this operation. + internal Aliases(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _apiVersion = apiVersion; + } + + /// Creates a new search alias. + /// The definition of the alias to create. + /// The cancellation token to use. + /// is null. + /// + public virtual async Task> CreateAsync(SearchAlias @alias, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(@alias, nameof(@alias)); + + using RequestContent content = @alias.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(content, context).ConfigureAwait(false); + return Response.FromValue(SearchAlias.FromResponse(response), response); + } + + /// Creates a new search alias. + /// The definition of the alias to create. + /// The cancellation token to use. + /// is null. + /// + public virtual Response Create(SearchAlias @alias, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(@alias, nameof(@alias)); + + using RequestContent content = @alias.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(content, context); + return Response.FromValue(SearchAlias.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new search alias. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new search alias. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new search alias or updates an alias if it already exists. + /// The name of the alias. + /// The definition of the alias to create or update. + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> CreateOrUpdateAsync(string aliasName, SearchAlias @alias, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + Argument.AssertNotNull(@alias, nameof(@alias)); + + using RequestContent content = @alias.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateOrUpdateAsync(aliasName, content, matchConditions, context).ConfigureAwait(false); + return Response.FromValue(SearchAlias.FromResponse(response), response); + } + + /// Creates a new search alias or updates an alias if it already exists. + /// The name of the alias. + /// The definition of the alias to create or update. + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual Response CreateOrUpdate(string aliasName, SearchAlias @alias, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + Argument.AssertNotNull(@alias, nameof(@alias)); + + using RequestContent content = @alias.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateOrUpdate(aliasName, content, matchConditions, context); + return Response.FromValue(SearchAlias.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new search alias or updates an alias if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the alias. + /// The content to send as the body of the request. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateOrUpdateAsync(string aliasName, RequestContent content, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(aliasName, content, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new search alias or updates an alias if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the alias. + /// The content to send as the body of the request. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response CreateOrUpdate(string aliasName, RequestContent content, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(aliasName, content, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a search alias and its associated mapping to an index. This operation + /// is permanent, with no recovery option. The mapped index is untouched by this + /// operation. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the alias. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteAsync(string aliasName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(aliasName, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a search alias and its associated mapping to an index. This operation + /// is permanent, with no recovery option. The mapped index is untouched by this + /// operation. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the alias. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(string aliasName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(aliasName, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves an alias definition. + /// The name of the alias. + /// The cancellation token to use. + /// is null. 
+ /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetAliasAsync(string aliasName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetAliasAsync(aliasName, context).ConfigureAwait(false); + return Response.FromValue(SearchAlias.FromResponse(response), response); + } + + /// Retrieves an alias definition. + /// The name of the alias. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetAlias(string aliasName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetAlias(aliasName, context); + return Response.FromValue(SearchAlias.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieves an alias definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the alias. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetAliasAsync(string aliasName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.GetAlias"); + scope.Start(); + try + { + using HttpMessage message = CreateGetAliasRequest(aliasName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieves an alias definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the alias. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetAlias(string aliasName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(aliasName, nameof(aliasName)); + + using var scope = ClientDiagnostics.CreateScope("Aliases.GetAlias"); + scope.Start(); + try + { + using HttpMessage message = CreateGetAliasRequest(aliasName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Lists all aliases available for a search service. + /// The cancellation token to use. + /// + public virtual AsyncPageable GetAliasesAsync(CancellationToken cancellationToken = default) + { + RequestContext context = cancellationToken.CanBeCanceled ? 
new RequestContext { CancellationToken = cancellationToken } : null; + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetAliasesRequest(context); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, null, e => SearchAlias.DeserializeSearchAlias(e), ClientDiagnostics, _pipeline, "Aliases.GetAliases", "value", null, context); + } + + /// Lists all aliases available for a search service. + /// The cancellation token to use. + /// + public virtual Pageable GetAliases(CancellationToken cancellationToken = default) + { + RequestContext context = cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetAliasesRequest(context); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, null, e => SearchAlias.DeserializeSearchAlias(e), ClientDiagnostics, _pipeline, "Aliases.GetAliases", "value", null, context); + } + + /// + /// [Protocol Method] Lists all aliases available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. + /// + public virtual AsyncPageable GetAliasesAsync(RequestContext context) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetAliasesRequest(context); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, null, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "Aliases.GetAliases", "value", null, context); + } + + /// + /// [Protocol Method] Lists all aliases available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. + /// + public virtual Pageable GetAliases(RequestContext context) + { + HttpMessage FirstPageRequest(int? 
pageSizeHint) => CreateGetAliasesRequest(context); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, null, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "Aliases.GetAliases", "value", null, context); + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier201); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/aliases", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetAliasesRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/aliases", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateOrUpdateRequest(string aliasName, RequestContent content, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200201); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/aliases('", false); + uri.AppendPath(aliasName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Prefer", "return=representation"); + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(string aliasName, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204404); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/aliases('", false); + uri.AppendPath(aliasName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + return message; + } + + internal HttpMessage CreateGetAliasRequest(string aliasName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/aliases('", false); + uri.AppendPath(aliasName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) 
+ { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier201; + private static ResponseClassifier ResponseClassifier201 => _responseClassifier201 ??= new StatusCodeClassifier(stackalloc ushort[] { 201 }); + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier200201; + private static ResponseClassifier ResponseClassifier200201 => _responseClassifier200201 ??= new StatusCodeClassifier(stackalloc ushort[] { 200, 201 }); + private static ResponseClassifier _responseClassifier204404; + private static ResponseClassifier ResponseClassifier204404 => _responseClassifier204404 ??= new StatusCodeClassifier(stackalloc ushort[] { 204, 404 }); + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AliasesRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/AliasesRestClient.cs deleted file mode 100644 index 811c8bd46402..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/AliasesRestClient.cs +++ /dev/null @@ -1,413 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents -{ - internal partial class AliasesRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of AliasesRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , or is null. - public AliasesRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? 
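// --- Editorial note (not part of the generated diff) -------------------------------
// A minimal sketch of how the inputs consumed by the new request builders above
// (CreateCreateOrUpdateRequest / CreateDeleteRequest) are typically produced by a caller.
// RequestContext, MatchConditions and ETag come from Azure.Core; the ETag literal below
// is a hypothetical placeholder, not a value taken from this change.
using System.Threading;
using Azure;

CancellationToken cancellationToken = CancellationToken.None;

// Mirrors FromCancellationToken above: a plain RequestContext unless cancellation is possible.
RequestContext context = cancellationToken.CanBeCanceled
    ? new RequestContext { CancellationToken = cancellationToken }
    : new RequestContext();

// Non-null MatchConditions are written as If-Match / If-None-Match headers by the builders.
MatchConditions matchConditions = new MatchConditions
{
    IfMatch = new ETag("\"0x8DCAABBCCDDEE01\"") // hypothetical ETag captured from an earlier GET
};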
throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateCreateRequest(SearchAlias @alias) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/aliases", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(@alias); - request.Content = content; - return message; - } - - /// Creates a new search alias. - /// The definition of the alias to create. - /// The cancellation token to use. - /// is null. - public async Task> CreateAsync(SearchAlias @alias, CancellationToken cancellationToken = default) - { - if (@alias == null) - { - throw new ArgumentNullException(nameof(@alias)); - } - - using var message = CreateCreateRequest(@alias); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 201: - { - SearchAlias value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchAlias.DeserializeSearchAlias(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new search alias. - /// The definition of the alias to create. - /// The cancellation token to use. - /// is null. - public Response Create(SearchAlias @alias, CancellationToken cancellationToken = default) - { - if (@alias == null) - { - throw new ArgumentNullException(nameof(@alias)); - } - - using var message = CreateCreateRequest(@alias); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 201: - { - SearchAlias value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchAlias.DeserializeSearchAlias(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateListRequest() - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/aliases", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Lists all aliases available for a search service. - /// The cancellation token to use. 
- public async Task> ListAsync(CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - ListAliasesResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = ListAliasesResult.DeserializeListAliasesResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Lists all aliases available for a search service. - /// The cancellation token to use. - public Response List(CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - ListAliasesResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = ListAliasesResult.DeserializeListAliasesResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateCreateOrUpdateRequest(string aliasName, SearchAlias @alias, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Put; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/aliases('", false); - uri.AppendPath(aliasName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Prefer", "return=representation"); - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(@alias); - request.Content = content; - return message; - } - - /// Creates a new search alias or updates an alias if it already exists. - /// The definition of the alias to create or update. - /// The definition of the alias to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// or is null. 
- public async Task> CreateOrUpdateAsync(string aliasName, SearchAlias @alias, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (aliasName == null) - { - throw new ArgumentNullException(nameof(aliasName)); - } - if (@alias == null) - { - throw new ArgumentNullException(nameof(@alias)); - } - - using var message = CreateCreateOrUpdateRequest(aliasName, @alias, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchAlias value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchAlias.DeserializeSearchAlias(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new search alias or updates an alias if it already exists. - /// The definition of the alias to create or update. - /// The definition of the alias to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// or is null. - public Response CreateOrUpdate(string aliasName, SearchAlias @alias, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (aliasName == null) - { - throw new ArgumentNullException(nameof(aliasName)); - } - if (@alias == null) - { - throw new ArgumentNullException(nameof(@alias)); - } - - using var message = CreateCreateOrUpdateRequest(aliasName, @alias, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchAlias value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchAlias.DeserializeSearchAlias(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateDeleteRequest(string aliasName, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Delete; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/aliases('", false); - uri.AppendPath(aliasName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Deletes a search alias and its associated mapping to an index. This operation is permanent, with no recovery option. The mapped index is untouched by this operation. - /// The name of the alias to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. 
The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public async Task DeleteAsync(string aliasName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (aliasName == null) - { - throw new ArgumentNullException(nameof(aliasName)); - } - - using var message = CreateDeleteRequest(aliasName, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Deletes a search alias and its associated mapping to an index. This operation is permanent, with no recovery option. The mapped index is untouched by this operation. - /// The name of the alias to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public Response Delete(string aliasName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (aliasName == null) - { - throw new ArgumentNullException(nameof(aliasName)); - } - - using var message = CreateDeleteRequest(aliasName, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetRequest(string aliasName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/aliases('", false); - uri.AppendPath(aliasName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Retrieves an alias definition. - /// The name of the alias to retrieve. - /// The cancellation token to use. - /// is null. - public async Task> GetAsync(string aliasName, CancellationToken cancellationToken = default) - { - if (aliasName == null) - { - throw new ArgumentNullException(nameof(aliasName)); - } - - using var message = CreateGetRequest(aliasName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchAlias value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchAlias.DeserializeSearchAlias(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Retrieves an alias definition. - /// The name of the alias to retrieve. - /// The cancellation token to use. - /// is null. 
- public Response Get(string aliasName, CancellationToken cancellationToken = default) - { - if (aliasName == null) - { - throw new ArgumentNullException(nameof(aliasName)); - } - - using var message = CreateGetRequest(aliasName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchAlias value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchAlias.DeserializeSearchAlias(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeResult.Serialization.cs new file mode 100644 index 000000000000..e5d5da02f5c2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeResult.Serialization.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class AnalyzeResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("tokens"u8); + writer.WriteStartArray(); + foreach (var item in Tokens) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AnalyzeResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAnalyzeResult(document.RootElement, options); + } + + internal static AnalyzeResult DeserializeAnalyzeResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList tokens = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tokens"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(AnalyzedTokenInfo.DeserializeAnalyzedTokenInfo(item, options)); + } + tokens = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AnalyzeResult(tokens, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support writing '{options.Format}' format."); + } + } + + AnalyzeResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAnalyzeResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AnalyzeResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAnalyzeResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeResult.cs new file mode 100644 index 000000000000..c35cfcf9b23f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeResult.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// The result of testing an analyzer on text. 
+ internal partial class AnalyzeResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The list of tokens returned by the analyzer specified in the request. + /// is null. + internal AnalyzeResult(IEnumerable tokens) + { + Argument.AssertNotNull(tokens, nameof(tokens)); + + Tokens = tokens.ToList(); + } + + /// Initializes a new instance of . + /// The list of tokens returned by the analyzer specified in the request. + /// Keeps track of any properties unknown to the library. + internal AnalyzeResult(IReadOnlyList tokens, IDictionary serializedAdditionalRawData) + { + Tokens = tokens; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AnalyzeResult() + { + } + + /// The list of tokens returned by the analyzer specified in the request. + public IReadOnlyList Tokens { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeTextOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeTextOptions.Serialization.cs new file mode 100644 index 000000000000..58ff4e7848b1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeTextOptions.Serialization.cs @@ -0,0 +1,244 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class AnalyzeTextOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
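// --- Editorial note (not part of the generated diff) -------------------------------
// The "serialized additional raw data" dictionaries above carry any JSON properties the
// library does not recognize. A small sketch of the BinaryData payloads described in the
// doc comments (standard System.BinaryData helpers, shown here only for context):
using System;

BinaryData s1 = BinaryData.FromObjectAsJson("foo");                 // payload: "foo"
BinaryData s2 = BinaryData.FromString("\"foo\"");                   // payload: "foo"
BinaryData o1 = BinaryData.FromObjectAsJson(new { key = "value" }); // payload: { "key": "value" }
BinaryData o2 = BinaryData.FromString("{\"key\": \"value\"}");      // payload: { "key": "value" }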
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.AnalyzeTextOptions)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + if (Optional.IsDefined(AnalyzerName)) + { + writer.WritePropertyName("analyzer"u8); + writer.WriteStringValue(AnalyzerName.Value.ToString()); + } + if (Optional.IsDefined(TokenizerName)) + { + writer.WritePropertyName("tokenizer"u8); + writer.WriteStringValue(TokenizerName.Value.ToString()); + } + if (Optional.IsDefined(NormalizerName)) + { + writer.WritePropertyName("normalizer"u8); + writer.WriteStringValue(NormalizerName.Value.ToString()); + } + if (Optional.IsCollectionDefined(TokenFilters)) + { + writer.WritePropertyName("tokenFilters"u8); + writer.WriteStartArray(); + foreach (var item in TokenFilters) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(CharFilters)) + { + writer.WritePropertyName("charFilters"u8); + writer.WriteStartArray(); + foreach (var item in CharFilters) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.AnalyzeTextOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.AnalyzeTextOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.AnalyzeTextOptions.DeserializeAnalyzeTextOptions(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.AnalyzeTextOptions DeserializeAnalyzeTextOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string text = default; + Search.Documents.LexicalAnalyzerName? analyzer = default; + LexicalTokenizerName? tokenizer = default; + Search.Documents.LexicalNormalizerName? 
normalizer = default; + IList tokenFilters = default; + IList charFilters = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + text = property.Value.GetString(); + continue; + } + if (property.NameEquals("analyzer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + analyzer = new Search.Documents.LexicalAnalyzerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("tokenizer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + tokenizer = new LexicalTokenizerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("normalizer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + normalizer = new Search.Documents.LexicalNormalizerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("tokenFilters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new TokenFilterName(item.GetString())); + } + tokenFilters = array; + continue; + } + if (property.NameEquals("charFilters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + charFilters = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.AnalyzeTextOptions( + text, + analyzer, + tokenizer, + normalizer, + tokenFilters ?? new ChangeTrackingList(), + charFilters ?? new ChangeTrackingList(), + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.AnalyzeTextOptions)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.AnalyzeTextOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.AnalyzeTextOptions.DeserializeAnalyzeTextOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.AnalyzeTextOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static Search.Documents.Indexes.Models.AnalyzeTextOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.AnalyzeTextOptions.DeserializeAnalyzeTextOptions(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeTextOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeTextOptions.cs new file mode 100644 index 000000000000..6990d92f4b9b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzeTextOptions.cs @@ -0,0 +1,101 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Specifies some text and analysis components used to break that text into tokens. + public partial class AnalyzeTextOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The text to break into tokens. + /// is null. + public AnalyzeTextOptions(string text) + { + Argument.AssertNotNull(text, nameof(text)); + + Text = text; + TokenFilters = new ChangeTrackingList(); + CharFilters = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The text to break into tokens. + /// + /// The name of the analyzer to use to break the given text. If this parameter is + /// not specified, you must specify a tokenizer instead. The tokenizer and analyzer + /// parameters are mutually exclusive. + /// + /// + /// The name of the tokenizer to use to break the given text. If this parameter is + /// not specified, you must specify an analyzer instead. The tokenizer and analyzer + /// parameters are mutually exclusive. + /// + /// The name of the normalizer to use to normalize the given text. + /// + /// An optional list of token filters to use when breaking the given text. This + /// parameter can only be set when using the tokenizer parameter. + /// + /// + /// An optional list of character filters to use when breaking the given text. This + /// parameter can only be set when using the tokenizer parameter. + /// + /// Keeps track of any properties unknown to the library. + internal AnalyzeTextOptions(string text, Search.Documents.LexicalAnalyzerName? analyzerName, LexicalTokenizerName? tokenizerName, Search.Documents.LexicalNormalizerName? 
normalizerName, IList tokenFilters, IList charFilters, IDictionary serializedAdditionalRawData) + { + Text = text; + AnalyzerName = analyzerName; + TokenizerName = tokenizerName; + NormalizerName = normalizerName; + TokenFilters = tokenFilters; + CharFilters = charFilters; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AnalyzeTextOptions() + { + } + + /// The text to break into tokens. + public string Text { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AnalyzedTokenInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzedTokenInfo.Serialization.cs new file mode 100644 index 000000000000..7d843f5a6979 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzedTokenInfo.Serialization.cs @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AnalyzedTokenInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzedTokenInfo)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("token"u8); + writer.WriteStringValue(Token); + writer.WritePropertyName("startOffset"u8); + writer.WriteNumberValue(StartOffset); + writer.WritePropertyName("endOffset"u8); + writer.WriteNumberValue(EndOffset); + writer.WritePropertyName("position"u8); + writer.WriteNumberValue(Position); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AnalyzedTokenInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
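// --- Editorial note (not part of the generated diff) -------------------------------
// A hedged sketch of constructing the AnalyzeTextOptions shown above via its public
// constructor, plus the JSON shape its serializer writes (property names taken from
// JsonModelWriteCore: "text", "analyzer", "tokenizer", "normalizer", "tokenFilters",
// "charFilters"). Only Text is set here; analyzer and tokenizer are optional and
// mutually exclusive per the constructor docs. The analyzer value in the comment is an
// illustrative example, not taken from this change.
using Azure.Search.Documents.Indexes.Models;

var analyzeOptions = new AnalyzeTextOptions("The quick brown fox jumps over the lazy dog");

// Expected wire shape (illustrative):
// { "text": "The quick brown fox jumps over the lazy dog" }
// With an analyzer specified it would instead carry, e.g.:
// { "text": "...", "analyzer": "standard.lucene" }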
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzedTokenInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAnalyzedTokenInfo(document.RootElement, options); + } + + internal static AnalyzedTokenInfo DeserializeAnalyzedTokenInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string token = default; + int startOffset = default; + int endOffset = default; + int position = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("token"u8)) + { + token = property.Value.GetString(); + continue; + } + if (property.NameEquals("startOffset"u8)) + { + startOffset = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("endOffset"u8)) + { + endOffset = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("position"u8)) + { + position = property.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AnalyzedTokenInfo(token, startOffset, endOffset, position, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AnalyzedTokenInfo)} does not support writing '{options.Format}' format."); + } + } + + AnalyzedTokenInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAnalyzedTokenInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AnalyzedTokenInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AnalyzedTokenInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAnalyzedTokenInfo(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AnalyzedTokenInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzedTokenInfo.cs new file mode 100644 index 000000000000..c66aa12b387d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AnalyzedTokenInfo.cs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Information about a token returned by an analyzer. + public partial class AnalyzedTokenInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The token returned by the analyzer. + /// The index of the first character of the token in the input text. + /// The index of the last character of the token in the input text. + /// + /// The position of the token in the input text relative to other tokens. The first + /// token in the input text has position 0, the next has position 1, and so on. + /// Depending on the analyzer used, some tokens might have the same position, for + /// example if they are synonyms of each other. + /// + /// is null. + internal AnalyzedTokenInfo(string token, int startOffset, int endOffset, int position) + { + Argument.AssertNotNull(token, nameof(token)); + + Token = token; + StartOffset = startOffset; + EndOffset = endOffset; + Position = position; + } + + /// Initializes a new instance of . + /// The token returned by the analyzer. + /// The index of the first character of the token in the input text. + /// The index of the last character of the token in the input text. + /// + /// The position of the token in the input text relative to other tokens. The first + /// token in the input text has position 0, the next has position 1, and so on. + /// Depending on the analyzer used, some tokens might have the same position, for + /// example if they are synonyms of each other. + /// + /// Keeps track of any properties unknown to the library. + internal AnalyzedTokenInfo(string token, int startOffset, int endOffset, int position, IDictionary serializedAdditionalRawData) + { + Token = token; + StartOffset = startOffset; + EndOffset = endOffset; + Position = position; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AnalyzedTokenInfo() + { + } + + /// The token returned by the analyzer. + public string Token { get; } + /// The index of the first character of the token in the input text. 
+ public int StartOffset { get; } + /// The index of the last character of the token in the input text. + public int EndOffset { get; } + /// + /// The position of the token in the input text relative to other tokens. The first + /// token in the input text has position 0, the next has position 1, and so on. + /// Depending on the analyzer used, some tokens might have the same position, for + /// example if they are synonyms of each other. + /// + public int Position { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AsciiFoldingTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AsciiFoldingTokenFilter.Serialization.cs new file mode 100644 index 000000000000..c9a6c2f4697e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AsciiFoldingTokenFilter.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AsciiFoldingTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AsciiFoldingTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(PreserveOriginal)) + { + writer.WritePropertyName("preserveOriginal"u8); + writer.WriteBooleanValue(PreserveOriginal.Value); + } + } + + AsciiFoldingTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AsciiFoldingTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAsciiFoldingTokenFilter(document.RootElement, options); + } + + internal static AsciiFoldingTokenFilter DeserializeAsciiFoldingTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool? 
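// --- Editorial note (not part of the generated diff) -------------------------------
// A small sketch of consuming the AnalyzedTokenInfo values defined above. The 'tokens'
// list is a hypothetical input (for example, the token list returned by an analyze
// call); only the members shown in this file are used.
using System;
using System.Collections.Generic;
using Azure.Search.Documents;

static class AnalyzedTokenInfoExample
{
    public static void PrintTokens(IReadOnlyList<AnalyzedTokenInfo> tokens)
    {
        foreach (AnalyzedTokenInfo token in tokens)
        {
            // StartOffset/EndOffset are character indexes into the input text (EndOffset is
            // the last character's index); Position is token order, and synonyms can share
            // a position, per the doc comments above.
            Console.WriteLine($"{token.Token}: chars {token.StartOffset}-{token.EndOffset}, position {token.Position}");
        }
    }
}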
preserveOriginal = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("preserveOriginal"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preserveOriginal = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AsciiFoldingTokenFilter(odataType, name, serializedAdditionalRawData, preserveOriginal); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AsciiFoldingTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + AsciiFoldingTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAsciiFoldingTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AsciiFoldingTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AsciiFoldingTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAsciiFoldingTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AsciiFoldingTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/AsciiFoldingTokenFilter.cs new file mode 100644 index 000000000000..619de16ed5ea --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AsciiFoldingTokenFilter.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Converts alphabetic, numeric, and symbolic Unicode characters which are not in + /// the first 127 ASCII characters (the "Basic Latin" Unicode block) into their + /// ASCII equivalents, if such equivalents exist. This token filter is implemented + /// using Apache Lucene. 
+ /// + public partial class AsciiFoldingTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public AsciiFoldingTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.AsciiFoldingTokenFilter"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating whether the original token will be kept. Default is false. + internal AsciiFoldingTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, bool? preserveOriginal) : base(odataType, name, serializedAdditionalRawData) + { + PreserveOriginal = preserveOriginal; + } + + /// Initializes a new instance of for deserialization. + internal AsciiFoldingTokenFilter() + { + } + + /// A value indicating whether the original token will be kept. Default is false. + public bool? PreserveOriginal { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteItem.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteItem.Serialization.cs new file mode 100644 index 000000000000..6626476cbb44 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteItem.Serialization.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AutocompleteItem : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
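// --- Editorial note (not part of the generated diff) -------------------------------
// A minimal sketch of the public constructor and the single settable option exposed
// above. The filter name is a hypothetical example that follows the documented naming
// rules (letters, digits, spaces, dashes or underscores; alphanumeric at both ends;
// at most 128 characters).
using Azure.Search.Documents;

var asciiFolding = new AsciiFoldingTokenFilter("my-ascii-folding")
{
    // Keep the original token alongside its folded ASCII form; the default is false.
    PreserveOriginal = true
};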
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutocompleteItem)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + writer.WritePropertyName("queryPlusText"u8); + writer.WriteStringValue(QueryPlusText); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutocompleteItem IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutocompleteItem)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutocompleteItem(document.RootElement, options); + } + + internal static AutocompleteItem DeserializeAutocompleteItem(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string text = default; + string queryPlusText = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + text = property.Value.GetString(); + continue; + } + if (property.NameEquals("queryPlusText"u8)) + { + queryPlusText = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutocompleteItem(text, queryPlusText, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutocompleteItem)} does not support writing '{options.Format}' format."); + } + } + + AutocompleteItem IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutocompleteItem(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutocompleteItem)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static AutocompleteItem FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutocompleteItem(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteItem.cs b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteItem.cs new file mode 100644 index 000000000000..e7e4a55ae4b0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteItem.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// The result of Autocomplete requests. + public partial class AutocompleteItem + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The completed term. + /// The query along with the completed term. + /// or is null. + internal AutocompleteItem(string text, string queryPlusText) + { + Argument.AssertNotNull(text, nameof(text)); + Argument.AssertNotNull(queryPlusText, nameof(queryPlusText)); + + Text = text; + QueryPlusText = queryPlusText; + } + + /// Initializes a new instance of . + /// The completed term. + /// The query along with the completed term. + /// Keeps track of any properties unknown to the library. + internal AutocompleteItem(string text, string queryPlusText, IDictionary serializedAdditionalRawData) + { + Text = text; + QueryPlusText = queryPlusText; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AutocompleteItem() + { + } + + /// The completed term. + public string Text { get; } + /// The query along with the completed term. + public string QueryPlusText { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteMode.cs new file mode 100644 index 000000000000..a12b55393e80 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteMode.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' + /// to get shingles and 'oneTermWithContext' to use the current context in + /// producing autocomplete terms. 
+ /// + public readonly partial struct AutocompleteMode : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AutocompleteMode(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string OneTermValue = "oneTerm"; + private const string TwoTermsValue = "twoTerms"; + private const string OneTermWithContextValue = "oneTermWithContext"; + + /// + /// Only one term is suggested. If the query has two terms, only the last term is + /// completed. For example, if the input is 'washington medic', the suggested terms + /// could include 'medicaid', 'medicare', and 'medicine'. + /// + public static AutocompleteMode OneTerm { get; } = new AutocompleteMode(OneTermValue); + /// + /// Matching two-term phrases in the index will be suggested. For example, if the + /// input is 'medic', the suggested terms could include 'medicare coverage' and 'medical assistant'. + /// + public static AutocompleteMode TwoTerms { get; } = new AutocompleteMode(TwoTermsValue); + /// + /// Completes the last term in a query with two or more terms, where the last two + /// terms are a phrase that exists in the index. For example, if the input is 'washington medic', the suggested terms could include 'washington medicaid' and 'washington medical'. + /// + public static AutocompleteMode OneTermWithContext { get; } = new AutocompleteMode(OneTermWithContextValue); + /// Determines if two values are the same. + public static bool operator ==(AutocompleteMode left, AutocompleteMode right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AutocompleteMode left, AutocompleteMode right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AutocompleteMode(string value) => new AutocompleteMode(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AutocompleteMode other && Equals(other); + /// + public bool Equals(AutocompleteMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteOptions.Serialization.cs new file mode 100644 index 000000000000..6ea3d434d194 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteOptions.Serialization.cs @@ -0,0 +1,265 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AutocompleteOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
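Reviewer note: AutocompleteMode above follows the SDK's extensible-enum pattern, which is why the writer below can emit Mode.Value.ToString() directly. A small illustrative sketch using only members declared in that struct:

using System;
using Azure.Search.Documents;

AutocompleteMode mode = "twoTerms";                      // implicit conversion from string
Console.WriteLine(mode == AutocompleteMode.TwoTerms);    // True: equality is case-insensitive
Console.WriteLine(mode);                                 // "twoTerms" is the value written to "autocompleteMode"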
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutocompleteOptions)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("search"u8); + writer.WriteStringValue(SearchText); + if (Optional.IsDefined(Mode)) + { + writer.WritePropertyName("autocompleteMode"u8); + writer.WriteStringValue(Mode.Value.ToString()); + } + if (Optional.IsDefined(Filter)) + { + writer.WritePropertyName("filter"u8); + writer.WriteStringValue(Filter); + } + if (Optional.IsDefined(UseFuzzyMatching)) + { + writer.WritePropertyName("fuzzy"u8); + writer.WriteBooleanValue(UseFuzzyMatching.Value); + } + if (Optional.IsDefined(HighlightPostTag)) + { + writer.WritePropertyName("highlightPostTag"u8); + writer.WriteStringValue(HighlightPostTag); + } + if (Optional.IsDefined(HighlightPreTag)) + { + writer.WritePropertyName("highlightPreTag"u8); + writer.WriteStringValue(HighlightPreTag); + } + if (Optional.IsDefined(MinimumCoverage)) + { + writer.WritePropertyName("minimumCoverage"u8); + writer.WriteNumberValue(MinimumCoverage.Value); + } + if (Optional.IsDefined(SearchFieldsRaw)) + { + writer.WritePropertyName("searchFields"u8); + writer.WriteStringValue(SearchFieldsRaw); + } + writer.WritePropertyName("suggesterName"u8); + writer.WriteStringValue(SuggesterName); + if (Optional.IsDefined(Size)) + { + writer.WritePropertyName("top"u8); + writer.WriteNumberValue(Size.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutocompleteOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutocompleteOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutocompleteOptions(document.RootElement, options); + } + + internal static AutocompleteOptions DeserializeAutocompleteOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string search = default; + AutocompleteMode? autocompleteMode = default; + string filter = default; + bool? fuzzy = default; + string highlightPostTag = default; + string highlightPreTag = default; + double? minimumCoverage = default; + string searchFields = default; + string suggesterName = default; + int? 
top = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("search"u8)) + { + search = property.Value.GetString(); + continue; + } + if (property.NameEquals("autocompleteMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + autocompleteMode = new AutocompleteMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("filter"u8)) + { + filter = property.Value.GetString(); + continue; + } + if (property.NameEquals("fuzzy"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + fuzzy = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("highlightPostTag"u8)) + { + highlightPostTag = property.Value.GetString(); + continue; + } + if (property.NameEquals("highlightPreTag"u8)) + { + highlightPreTag = property.Value.GetString(); + continue; + } + if (property.NameEquals("minimumCoverage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minimumCoverage = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("searchFields"u8)) + { + searchFields = property.Value.GetString(); + continue; + } + if (property.NameEquals("suggesterName"u8)) + { + suggesterName = property.Value.GetString(); + continue; + } + if (property.NameEquals("top"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + top = property.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutocompleteOptions( + search, + autocompleteMode, + filter, + fuzzy, + highlightPostTag, + highlightPreTag, + minimumCoverage, + searchFields, + suggesterName, + top, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutocompleteOptions)} does not support writing '{options.Format}' format."); + } + } + + AutocompleteOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutocompleteOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutocompleteOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AutocompleteOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutocompleteOptions(document.RootElement); + } + + /// Convert into a . 
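For reference, the write path above produces a request body shaped roughly like the following (values invented; optional members are omitted when undefined). Note the property-name mapping: Mode maps to "autocompleteMode", UseFuzzyMatching to "fuzzy", SearchFieldsRaw to "searchFields", and Size to "top".

// Illustrative payload only; not taken from a recorded request.
string examplePayload = """
{
  "search": "washington medic",
  "autocompleteMode": "oneTermWithContext",
  "fuzzy": true,
  "minimumCoverage": 80,
  "searchFields": "hotelName,description",
  "suggesterName": "sg",
  "top": 5
}
""";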
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteOptions.cs new file mode 100644 index 000000000000..28d2f4f827b2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteOptions.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Parameters for fuzzy matching, and other autocomplete query behaviors. + public partial class AutocompleteOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The search text on which to base autocomplete results. + /// + /// Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' + /// to get shingles and 'oneTermWithContext' to use the current context while + /// producing auto-completed terms. + /// + /// + /// An OData expression that filters the documents used to produce completed terms + /// for the Autocomplete result. + /// + /// + /// A value indicating whether to use fuzzy matching for the autocomplete query. + /// Default is false. When set to true, the query will autocomplete terms even if + /// there's a substituted or missing character in the search text. While this + /// provides a better experience in some scenarios, it comes at a performance cost + /// as fuzzy autocomplete queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by an autocomplete query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The comma-separated list of field names to consider when querying for + /// auto-completed terms. Target fields must be included in the specified + /// suggester. + /// + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// + /// The number of auto-completed terms to retrieve. This must be a value between 1 + /// and 100. The default is 5. 
+ /// + /// Keeps track of any properties unknown to the library. + internal AutocompleteOptions(string searchText, AutocompleteMode? mode, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, string searchFieldsRaw, string suggesterName, int? size, IDictionary serializedAdditionalRawData) + { + SearchText = searchText; + Mode = mode; + Filter = filter; + UseFuzzyMatching = useFuzzyMatching; + HighlightPostTag = highlightPostTag; + HighlightPreTag = highlightPreTag; + MinimumCoverage = minimumCoverage; + SearchFieldsRaw = searchFieldsRaw; + SuggesterName = suggesterName; + Size = size; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + /// + /// A value indicating whether to use fuzzy matching for the autocomplete query. + /// Default is false. When set to true, the query will autocomplete terms even if + /// there's a substituted or missing character in the search text. While this + /// provides a better experience in some scenarios, it comes at a performance cost + /// as fuzzy autocomplete queries are slower and consume more resources. + /// + public bool? UseFuzzyMatching { get; set; } + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting is disabled. + /// + public string HighlightPostTag { get; set; } + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting is disabled. + /// + public string HighlightPreTag { get; set; } + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by an autocomplete query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + public double? MinimumCoverage { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteResults.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteResults.Serialization.cs new file mode 100644 index 000000000000..bdf3f786c526 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteResults.Serialization.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + public partial class AutocompleteResults : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutocompleteResults)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Coverage)) + { + writer.WritePropertyName("@search.coverage"u8); + writer.WriteNumberValue(Coverage.Value); + } + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Results) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AutocompleteResults IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AutocompleteResults)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAutocompleteResults(document.RootElement, options); + } + + internal static AutocompleteResults DeserializeAutocompleteResults(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double? searchCoverage = default; + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@search.coverage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchCoverage = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(AutocompleteItem.DeserializeAutocompleteItem(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AutocompleteResults(searchCoverage, value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AutocompleteResults)} does not support writing '{options.Format}' format."); + } + } + + AutocompleteResults IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutocompleteResults(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AutocompleteResults)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AutocompleteResults FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAutocompleteResults(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteResults.cs b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteResults.cs new file mode 100644 index 000000000000..bc9d3e988a18 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AutocompleteResults.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents.Models +{ + /// The result of Autocomplete query. + public partial class AutocompleteResults + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The list of returned Autocompleted items. + /// is null. + internal AutocompleteResults(IEnumerable results) + { + Argument.AssertNotNull(results, nameof(results)); + + Results = results.ToList(); + } + + /// Initializes a new instance of . + /// + /// A value indicating the percentage of the index that was considered by the + /// autocomplete request, or null if minimumCoverage was not specified in the + /// request. + /// + /// The list of returned Autocompleted items. + /// Keeps track of any properties unknown to the library. + internal AutocompleteResults(double? coverage, IReadOnlyList results, IDictionary serializedAdditionalRawData) + { + Coverage = coverage; + Results = results; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
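Reviewer note: the deserializer above consumes a service response in which "@search.coverage" maps to Coverage and "value" to Results. A hedged sketch (values invented; assumes a project that also references System.ClientModel):

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents.Models; // namespace declared by this generated file

BinaryData body = BinaryData.FromString(
    "{\"@search.coverage\":92.5,\"value\":[{\"text\":\"medicare\",\"queryPlusText\":\"washington medicare\"}]}");
var results = ModelReaderWriter.Read<AutocompleteResults>(body);
Console.WriteLine($"coverage={results.Coverage}, items={results.Results.Count}");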
+ internal AutocompleteResults() + { + } + + /// + /// A value indicating the percentage of the index that was considered by the + /// autocomplete request, or null if minimumCoverage was not specified in the + /// request. + /// + public double? Coverage { get; } + /// The list of returned Autocompleted items. + public IReadOnlyList Results { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureActiveDirectoryApplicationCredentials.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureActiveDirectoryApplicationCredentials.Serialization.cs new file mode 100644 index 000000000000..8bb2ce98ff36 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureActiveDirectoryApplicationCredentials.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureActiveDirectoryApplicationCredentials : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureActiveDirectoryApplicationCredentials)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("applicationId"u8); + writer.WriteStringValue(ApplicationId); + if (Optional.IsDefined(ApplicationSecret)) + { + writer.WritePropertyName("applicationSecret"u8); + writer.WriteStringValue(ApplicationSecret); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AzureActiveDirectoryApplicationCredentials IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureActiveDirectoryApplicationCredentials)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureActiveDirectoryApplicationCredentials(document.RootElement, options); + } + + internal static AzureActiveDirectoryApplicationCredentials DeserializeAzureActiveDirectoryApplicationCredentials(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string applicationId = default; + string applicationSecret = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("applicationId"u8)) + { + applicationId = property.Value.GetString(); + continue; + } + if (property.NameEquals("applicationSecret"u8)) + { + applicationSecret = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureActiveDirectoryApplicationCredentials(applicationId, applicationSecret, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureActiveDirectoryApplicationCredentials)} does not support writing '{options.Format}' format."); + } + } + + AzureActiveDirectoryApplicationCredentials IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureActiveDirectoryApplicationCredentials(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureActiveDirectoryApplicationCredentials)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AzureActiveDirectoryApplicationCredentials FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureActiveDirectoryApplicationCredentials(document.RootElement); + } + + /// Convert into a . 
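A hedged usage sketch for this model (not part of the generated code; the application ID and secret below are placeholders). Only applicationId is required at construction time; applicationSecret is optional on the wire, matching the writer above:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents; // namespace declared by this generated file

var credentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000")
{
    ApplicationSecret = "<application-secret>"   // placeholder, never a real secret
};
BinaryData json = ModelReaderWriter.Write(credentials);  // {"applicationId":"...","applicationSecret":"..."}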
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureActiveDirectoryApplicationCredentials.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureActiveDirectoryApplicationCredentials.cs new file mode 100644 index 000000000000..34c4ab2b23b5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureActiveDirectoryApplicationCredentials.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Credentials of a registered application created for your search service, used + /// for authenticated access to the encryption keys stored in Azure Key Vault. + /// + public partial class AzureActiveDirectoryApplicationCredentials + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// An AAD Application ID that was granted the required access permissions to the + /// Azure Key Vault that is to be used when encrypting your data at rest. The + /// Application ID should not be confused with the Object ID for your AAD + /// Application. + /// + /// is null. + public AzureActiveDirectoryApplicationCredentials(string applicationId) + { + Argument.AssertNotNull(applicationId, nameof(applicationId)); + + ApplicationId = applicationId; + } + + /// Initializes a new instance of . + /// + /// An AAD Application ID that was granted the required access permissions to the + /// Azure Key Vault that is to be used when encrypting your data at rest. The + /// Application ID should not be confused with the Object ID for your AAD + /// Application. + /// + /// The authentication key of the specified AAD application. + /// Keeps track of any properties unknown to the library. + internal AzureActiveDirectoryApplicationCredentials(string applicationId, string applicationSecret, IDictionary serializedAdditionalRawData) + { + ApplicationId = applicationId; + ApplicationSecret = applicationSecret; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AzureActiveDirectoryApplicationCredentials() + { + } + + /// + /// An AAD Application ID that was granted the required access permissions to the + /// Azure Key Vault that is to be used when encrypting your data at rest. The + /// Application ID should not be confused with the Object ID for your AAD + /// Application. + /// + public string ApplicationId { get; set; } + /// The authentication key of the specified AAD application. 
+ public string ApplicationSecret { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningParameters.Serialization.cs new file mode 100644 index 000000000000..c9ddc3795c83 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningParameters.Serialization.cs @@ -0,0 +1,212 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureMachineLearningParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureMachineLearningParameters)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("uri"u8); + writer.WriteStringValue(ScoringUri.AbsoluteUri); + if (Optional.IsDefined(AuthenticationKey)) + { + writer.WritePropertyName("key"u8); + writer.WriteStringValue(AuthenticationKey); + } + if (Optional.IsDefined(ResourceId)) + { + writer.WritePropertyName("resourceId"u8); + writer.WriteStringValue(ResourceId); + } + if (Optional.IsDefined(Timeout)) + { + writer.WritePropertyName("timeout"u8); + writer.WriteStringValue(Timeout.Value, "P"); + } + if (Optional.IsDefined(Region)) + { + writer.WritePropertyName("region"u8); + writer.WriteStringValue(Region); + } + if (Optional.IsDefined(ModelName)) + { + writer.WritePropertyName("modelName"u8); + writer.WriteStringValue(ModelName.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AzureMachineLearningParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureMachineLearningParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureMachineLearningParameters(document.RootElement, options); + } + + internal static AzureMachineLearningParameters DeserializeAzureMachineLearningParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri uri = default; + string key = default; + string resourceId = default; + TimeSpan? timeout = default; + string region = default; + AIFoundryModelCatalogName? modelName = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("uri"u8)) + { + uri = new Uri(property.Value.GetString()); + continue; + } + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("resourceId"u8)) + { + resourceId = property.Value.GetString(); + continue; + } + if (property.NameEquals("timeout"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + timeout = property.Value.GetTimeSpan("P"); + continue; + } + if (property.NameEquals("region"u8)) + { + region = property.Value.GetString(); + continue; + } + if (property.NameEquals("modelName"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + modelName = new AIFoundryModelCatalogName(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureMachineLearningParameters( + uri, + key, + resourceId, + timeout, + region, + modelName, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureMachineLearningParameters)} does not support writing '{options.Format}' format."); + } + } + + AzureMachineLearningParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureMachineLearningParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureMachineLearningParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
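A hedged construction sketch (not part of the generated code; the endpoint, key, and model name are illustrative). Only ScoringUri is required; Timeout round-trips as an ISO 8601 duration, which is what the "P" format used above produces:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents; // namespace declared by this generated file

var aml = new AzureMachineLearningParameters(new Uri("https://example-aml-endpoint.example.com/score"))
{
    AuthenticationKey = "<aml-key>",                                      // placeholder
    Timeout = TimeSpan.FromSeconds(30),                                   // serialized as "PT30S"
    ModelName = new AIFoundryModelCatalogName("Cohere-embed-v3-english")  // extensible enum; any catalog name string works
};
BinaryData payload = ModelReaderWriter.Write(aml);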
+ internal static AzureMachineLearningParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureMachineLearningParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningParameters.cs new file mode 100644 index 000000000000..08f98e4f5ba5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningParameters.cs @@ -0,0 +1,127 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Specifies the properties for connecting to an AML vectorizer. + public partial class AzureMachineLearningParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// (Required for no authentication or key authentication) The scoring URI of the + /// AML service to which the JSON payload will be sent. Only the https URI scheme + /// is allowed. + /// + /// is null. + public AzureMachineLearningParameters(Uri scoringUri) + { + Argument.AssertNotNull(scoringUri, nameof(scoringUri)); + + ScoringUri = scoringUri; + } + + /// Initializes a new instance of . + /// + /// (Required for no authentication or key authentication) The scoring URI of the + /// AML service to which the JSON payload will be sent. Only the https URI scheme + /// is allowed. + /// + /// (Required for key authentication) The key for the AML service. + /// + /// (Required for token authentication). The Azure Resource Manager resource ID of + /// the AML service. It should be in the format + /// subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. + /// + /// + /// (Optional) When specified, indicates the timeout for the http client making the + /// API call. + /// + /// (Optional for token authentication). The region the AML service is deployed in. + /// + /// The name of the embedding model from the Azure AI Foundry Catalog that is + /// deployed at the provided endpoint. + /// + /// Keeps track of any properties unknown to the library. + internal AzureMachineLearningParameters(Uri scoringUri, string authenticationKey, string resourceId, TimeSpan? timeout, string region, AIFoundryModelCatalogName? 
modelName, IDictionary serializedAdditionalRawData) + { + ScoringUri = scoringUri; + AuthenticationKey = authenticationKey; + ResourceId = resourceId; + Timeout = timeout; + Region = region; + ModelName = modelName; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AzureMachineLearningParameters() + { + } + + /// + /// (Required for no authentication or key authentication) The scoring URI of the + /// AML service to which the JSON payload will be sent. Only the https URI scheme + /// is allowed. + /// + public Uri ScoringUri { get; set; } + /// (Required for key authentication) The key for the AML service. + public string AuthenticationKey { get; set; } + /// + /// (Required for token authentication). The Azure Resource Manager resource ID of + /// the AML service. It should be in the format + /// subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. + /// + public string ResourceId { get; set; } + /// + /// (Optional) When specified, indicates the timeout for the http client making the + /// API call. + /// + public TimeSpan? Timeout { get; set; } + /// (Optional for token authentication). The region the AML service is deployed in. + public string Region { get; set; } + /// + /// The name of the embedding model from the Azure AI Foundry Catalog that is + /// deployed at the provided endpoint. + /// + public AIFoundryModelCatalogName? ModelName { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningSkill.Serialization.cs new file mode 100644 index 000000000000..17815a935203 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningSkill.Serialization.cs @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureMachineLearningSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureMachineLearningSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(ScoringUri)) + { + writer.WritePropertyName("uri"u8); + writer.WriteStringValue(ScoringUri.AbsoluteUri); + } + if (Optional.IsDefined(AuthenticationKey)) + { + writer.WritePropertyName("key"u8); + writer.WriteStringValue(AuthenticationKey); + } + if (Optional.IsDefined(ResourceId)) + { + writer.WritePropertyName("resourceId"u8); + writer.WriteStringValue(ResourceId); + } + if (Optional.IsDefined(Timeout)) + { + writer.WritePropertyName("timeout"u8); + writer.WriteStringValue(Timeout.Value, "P"); + } + if (Optional.IsDefined(Region)) + { + writer.WritePropertyName("region"u8); + writer.WriteStringValue(Region); + } + if (Optional.IsDefined(DegreeOfParallelism)) + { + writer.WritePropertyName("degreeOfParallelism"u8); + writer.WriteNumberValue(DegreeOfParallelism.Value); + } + } + + AzureMachineLearningSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureMachineLearningSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureMachineLearningSkill(document.RootElement, options); + } + + internal static AzureMachineLearningSkill DeserializeAzureMachineLearningSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri uri = default; + string key = default; + string resourceId = default; + TimeSpan? timeout = default; + string region = default; + int? 
degreeOfParallelism = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("uri"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + uri = new Uri(property.Value.GetString()); + continue; + } + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("resourceId"u8)) + { + resourceId = property.Value.GetString(); + continue; + } + if (property.NameEquals("timeout"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + timeout = property.Value.GetTimeSpan("P"); + continue; + } + if (property.NameEquals("region"u8)) + { + region = property.Value.GetString(); + continue; + } + if (property.NameEquals("degreeOfParallelism"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + degreeOfParallelism = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureMachineLearningSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + uri, + key, + resourceId, + timeout, + region, + degreeOfParallelism); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureMachineLearningSkill)} does not support writing '{options.Format}' format."); + } + } + + AzureMachineLearningSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureMachineLearningSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureMachineLearningSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AzureMachineLearningSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureMachineLearningSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningSkill.cs new file mode 100644 index 000000000000..e5886c94f484 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningSkill.cs @@ -0,0 +1,133 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The AML skill allows you to extend AI enrichment with a custom Azure Machine + /// Learning (AML) model. Once an AML model is trained and deployed, an AML skill + /// integrates it into AI enrichment. + /// + public partial class AzureMachineLearningSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public AzureMachineLearningSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Custom.AmlSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. 
+ /// + /// Keeps track of any properties unknown to the library. + /// + /// (Required for no authentication or key authentication) The scoring URI of the + /// AML service to which the JSON payload will be sent. Only the https URI scheme + /// is allowed. + /// + /// (Required for key authentication) The key for the AML service. + /// + /// (Required for token authentication). The Azure Resource Manager resource ID of + /// the AML service. It should be in the format + /// subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. + /// + /// + /// (Optional) When specified, indicates the timeout for the http client making the + /// API call. + /// + /// (Optional for token authentication). The region the AML service is deployed in. + /// + /// (Optional) When specified, indicates the number of calls the indexer will make + /// in parallel to the endpoint you have provided. You can decrease this value if + /// your endpoint is failing under too high of a request load, or raise it if your + /// endpoint is able to accept more requests and you would like an increase in the + /// performance of the indexer. If not set, a default value of 5 is used. The + /// degreeOfParallelism can be set to a maximum of 10 and a minimum of 1. + /// + internal AzureMachineLearningSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, Uri scoringUri, string authenticationKey, string resourceId, TimeSpan? timeout, string region, int? degreeOfParallelism) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + ScoringUri = scoringUri; + AuthenticationKey = authenticationKey; + ResourceId = resourceId; + Timeout = timeout; + Region = region; + DegreeOfParallelism = degreeOfParallelism; + } + + /// Initializes a new instance of for deserialization. + internal AzureMachineLearningSkill() + { + } + + /// + /// (Required for no authentication or key authentication) The scoring URI of the + /// AML service to which the JSON payload will be sent. Only the https URI scheme + /// is allowed. + /// + public Uri ScoringUri { get; set; } + /// (Required for key authentication) The key for the AML service. + public string AuthenticationKey { get; set; } + /// + /// (Required for token authentication). The Azure Resource Manager resource ID of + /// the AML service. It should be in the format + /// subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. + /// + public string ResourceId { get; set; } + /// + /// (Optional) When specified, indicates the timeout for the http client making the + /// API call. + /// + public TimeSpan? Timeout { get; set; } + /// (Optional for token authentication). The region the AML service is deployed in. + public string Region { get; set; } + /// + /// (Optional) When specified, indicates the number of calls the indexer will make + /// in parallel to the endpoint you have provided. You can decrease this value if + /// your endpoint is failing under too high of a request load, or raise it if your + /// endpoint is able to accept more requests and you would like an increase in the + /// performance of the indexer. If not set, a default value of 5 is used. The + /// degreeOfParallelism can be set to a maximum of 10 and a minimum of 1. + /// + public int? 
DegreeOfParallelism { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningVectorizer.Serialization.cs new file mode 100644 index 000000000000..a35b14f3830a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningVectorizer.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureMachineLearningVectorizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureMachineLearningVectorizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(AMLParameters)) + { + writer.WritePropertyName("amlParameters"u8); + writer.WriteObjectValue(AMLParameters, options); + } + } + + AzureMachineLearningVectorizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureMachineLearningVectorizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureMachineLearningVectorizer(document.RootElement, options); + } + + internal static AzureMachineLearningVectorizer DeserializeAzureMachineLearningVectorizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + AzureMachineLearningParameters amlParameters = default; + string name = default; + VectorSearchVectorizerKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("amlParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + amlParameters = AzureMachineLearningParameters.DeserializeAzureMachineLearningParameters(property.Value, options); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchVectorizerKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureMachineLearningVectorizer(name, kind, serializedAdditionalRawData, amlParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureMachineLearningVectorizer)} does not support writing '{options.Format}' format."); + } + } + + AzureMachineLearningVectorizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureMachineLearningVectorizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureMachineLearningVectorizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AzureMachineLearningVectorizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureMachineLearningVectorizer(document.RootElement); + } + + /// Convert into a . 
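// Illustrative sketch: because the vectorizer implements IJsonModel<T> / IPersistableModel<T>
// with the JSON ("J") format, it can be round-tripped through System.ClientModel's
// ModelReaderWriter. The vectorizer name passed to the public constructor (added in this
// change) is a placeholder.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

var vectorizer = new AzureMachineLearningVectorizer("my-aml-vectorizer");
BinaryData json = ModelReaderWriter.Write(vectorizer);               // serialize to wire JSON
AzureMachineLearningVectorizer roundTripped =
    ModelReaderWriter.Read<AzureMachineLearningVectorizer>(json);    // deserialize it back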
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningVectorizer.cs new file mode 100644 index 000000000000..2d0991eb0fd3 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureMachineLearningVectorizer.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Specifies an Azure Machine Learning endpoint deployed via the Azure AI Foundry + /// Model Catalog for generating the vector embedding of a query string. + /// + public partial class AzureMachineLearningVectorizer : VectorSearchVectorizer + { + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// is null. + public AzureMachineLearningVectorizer(string vectorizerName) : base(vectorizerName) + { + Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); + + Kind = VectorSearchVectorizerKind.AML; + } + + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// Type of VectorSearchVectorizer. + /// Keeps track of any properties unknown to the library. + /// Specifies the properties of the AML vectorizer. + internal AzureMachineLearningVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, IDictionary serializedAdditionalRawData, AzureMachineLearningParameters amlParameters) : base(vectorizerName, kind, serializedAdditionalRawData) + { + AMLParameters = amlParameters; + } + + /// Initializes a new instance of for deserialization. + internal AzureMachineLearningVectorizer() + { + } + + /// Specifies the properties of the AML vectorizer. + public AzureMachineLearningParameters AMLParameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIEmbeddingSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIEmbeddingSkill.Serialization.cs new file mode 100644 index 000000000000..e235a4122e59 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIEmbeddingSkill.Serialization.cs @@ -0,0 +1,261 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureOpenAIEmbeddingSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAIEmbeddingSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(ResourceUrl)) + { + writer.WritePropertyName("resourceUri"u8); + writer.WriteStringValue(ResourceUrl.AbsoluteUri); + } + if (Optional.IsDefined(DeploymentName)) + { + writer.WritePropertyName("deploymentId"u8); + writer.WriteStringValue(DeploymentName); + } + if (Optional.IsDefined(ApiKey)) + { + writer.WritePropertyName("apiKey"u8); + writer.WriteStringValue(ApiKey); + } + if (Optional.IsDefined(AuthIdentity)) + { + writer.WritePropertyName("authIdentity"u8); + writer.WriteObjectValue(AuthIdentity, options); + } + if (Optional.IsDefined(ModelName)) + { + writer.WritePropertyName("modelName"u8); + writer.WriteStringValue(ModelName.Value.ToString()); + } + if (Optional.IsDefined(Dimensions)) + { + writer.WritePropertyName("dimensions"u8); + writer.WriteNumberValue(Dimensions.Value); + } + } + + AzureOpenAIEmbeddingSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAIEmbeddingSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureOpenAIEmbeddingSkill(document.RootElement, options); + } + + internal static AzureOpenAIEmbeddingSkill DeserializeAzureOpenAIEmbeddingSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri resourceUri = default; + string deploymentId = default; + string apiKey = default; + SearchIndexerDataIdentity authIdentity = default; + AzureOpenAIModelName? modelName = default; + int? 
dimensions = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("resourceUri"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + resourceUri = new Uri(property.Value.GetString()); + continue; + } + if (property.NameEquals("deploymentId"u8)) + { + deploymentId = property.Value.GetString(); + continue; + } + if (property.NameEquals("apiKey"u8)) + { + apiKey = property.Value.GetString(); + continue; + } + if (property.NameEquals("authIdentity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (property.NameEquals("modelName"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + modelName = new AzureOpenAIModelName(property.Value.GetString()); + continue; + } + if (property.NameEquals("dimensions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + dimensions = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureOpenAIEmbeddingSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + resourceUri, + deploymentId, + apiKey, + authIdentity, + modelName, + dimensions); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureOpenAIEmbeddingSkill)} does not support writing '{options.Format}' format."); + } + } + + AzureOpenAIEmbeddingSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAIEmbeddingSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureOpenAIEmbeddingSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AzureOpenAIEmbeddingSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAIEmbeddingSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIEmbeddingSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIEmbeddingSkill.cs new file mode 100644 index 000000000000..d7e6f0a0d9be --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIEmbeddingSkill.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Allows you to generate a vector embedding for a given text input using the + /// Azure OpenAI resource. + /// + public partial class AzureOpenAIEmbeddingSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public AzureOpenAIEmbeddingSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// The resource URI of the Azure OpenAI resource. 
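// Illustrative sketch: an embedding skill targeting an Azure OpenAI deployment. The
// property names map to the wire fields shown in the serializer above ("resourceUri",
// "deploymentId", "modelName", "dimensions"); resource and deployment names are placeholders.
using System;
using Azure.Search.Documents;

var embeddingSkill = new AzureOpenAIEmbeddingSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("embedding") })
{
    ResourceUrl = new Uri("https://my-openai-resource.openai.azure.com"),
    DeploymentName = "my-embedding-deployment",
    ModelName = AzureOpenAIModelName.TextEmbedding3Large,
    Dimensions = 1024 // only supported by text-embedding-3 and later models
};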
+ /// ID of the Azure OpenAI model deployment on the designated resource. + /// API key of the designated Azure OpenAI resource. + /// + /// The user-assigned managed identity used for outbound connections. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The name of the embedding model that is deployed at the provided deploymentId + /// path. + /// + /// + /// The number of dimensions the resulting output embeddings should have. Only + /// supported in text-embedding-3 and later models. + /// + internal AzureOpenAIEmbeddingSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, Uri resourceUrl, string deploymentName, string apiKey, SearchIndexerDataIdentity authIdentity, AzureOpenAIModelName? modelName, int? dimensions) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + ResourceUrl = resourceUrl; + DeploymentName = deploymentName; + ApiKey = apiKey; + AuthIdentity = authIdentity; + ModelName = modelName; + Dimensions = dimensions; + } + + /// Initializes a new instance of for deserialization. + internal AzureOpenAIEmbeddingSkill() + { + } + + /// The resource URI of the Azure OpenAI resource. + public Uri ResourceUrl { get; set; } + /// ID of the Azure OpenAI model deployment on the designated resource. + public string DeploymentName { get; set; } + /// API key of the designated Azure OpenAI resource. + public string ApiKey { get; set; } + /// + /// The user-assigned managed identity used for outbound connections. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity AuthIdentity { get; set; } + /// + /// The name of the embedding model that is deployed at the provided deploymentId + /// path. + /// + public AzureOpenAIModelName? ModelName { get; set; } + /// + /// The number of dimensions the resulting output embeddings should have. Only + /// supported in text-embedding-3 and later models. + /// + public int? Dimensions { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIModelName.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIModelName.cs similarity index 92% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIModelName.cs rename to sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIModelName.cs index 24bd4068aa82..1bac164dc1e5 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIModelName.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIModelName.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The Azure Open AI model name that will be called. public readonly partial struct AzureOpenAIModelName : IEquatable @@ -26,11 +26,11 @@ public AzureOpenAIModelName(string value) private const string TextEmbedding3LargeValue = "text-embedding-3-large"; private const string TextEmbedding3SmallValue = "text-embedding-3-small"; - /// text-embedding-ada-002. 
+ /// TextEmbeddingAda002 model. public static AzureOpenAIModelName TextEmbeddingAda002 { get; } = new AzureOpenAIModelName(TextEmbeddingAda002Value); - /// text-embedding-3-large. + /// TextEmbedding3Large model. public static AzureOpenAIModelName TextEmbedding3Large { get; } = new AzureOpenAIModelName(TextEmbedding3LargeValue); - /// text-embedding-3-small. + /// TextEmbedding3Small model. public static AzureOpenAIModelName TextEmbedding3Small { get; } = new AzureOpenAIModelName(TextEmbedding3SmallValue); /// Determines if two values are the same. public static bool operator ==(AzureOpenAIModelName left, AzureOpenAIModelName right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAITokenizerParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAITokenizerParameters.Serialization.cs new file mode 100644 index 000000000000..e8747a93bc6b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAITokenizerParameters.Serialization.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureOpenAITokenizerParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAITokenizerParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(EncoderModelName)) + { + writer.WritePropertyName("encoderModelName"u8); + writer.WriteStringValue(EncoderModelName.Value.ToString()); + } + if (Optional.IsCollectionDefined(AllowedSpecialTokens)) + { + writer.WritePropertyName("allowedSpecialTokens"u8); + writer.WriteStartArray(); + foreach (var item in AllowedSpecialTokens) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AzureOpenAITokenizerParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAITokenizerParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureOpenAITokenizerParameters(document.RootElement, options); + } + + internal static AzureOpenAITokenizerParameters DeserializeAzureOpenAITokenizerParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SplitSkillEncoderModelName? encoderModelName = default; + IList allowedSpecialTokens = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("encoderModelName"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encoderModelName = new SplitSkillEncoderModelName(property.Value.GetString()); + continue; + } + if (property.NameEquals("allowedSpecialTokens"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + allowedSpecialTokens = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureOpenAITokenizerParameters(encoderModelName, allowedSpecialTokens ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureOpenAITokenizerParameters)} does not support writing '{options.Format}' format."); + } + } + + AzureOpenAITokenizerParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAITokenizerParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureOpenAITokenizerParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AzureOpenAITokenizerParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAITokenizerParameters(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAITokenizerParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAITokenizerParameters.cs new file mode 100644 index 000000000000..036b97c550e4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAITokenizerParameters.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Azure OpenAI Tokenizer parameters. + public partial class AzureOpenAITokenizerParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public AzureOpenAITokenizerParameters() + { + AllowedSpecialTokens = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// Only applies if the unit is set to azureOpenAITokens. Options include + /// 'R50k_base', 'P50k_base', 'P50k_edit' and 'CL100k_base'. The default value is 'CL100k_base'. + /// + /// + /// (Optional) Only applies if the unit is set to azureOpenAITokens. This parameter + /// defines a collection of special tokens that are permitted within the + /// tokenization process. + /// + /// Keeps track of any properties unknown to the library. + internal AzureOpenAITokenizerParameters(SplitSkillEncoderModelName? encoderModelName, IList allowedSpecialTokens, IDictionary serializedAdditionalRawData) + { + EncoderModelName = encoderModelName; + AllowedSpecialTokens = allowedSpecialTokens; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// Only applies if the unit is set to azureOpenAITokens. Options include + /// 'R50k_base', 'P50k_base', 'P50k_edit' and 'CL100k_base'. The default value is 'CL100k_base'. + /// + public SplitSkillEncoderModelName? EncoderModelName { get; set; } + /// + /// (Optional) Only applies if the unit is set to azureOpenAITokens. This parameter + /// defines a collection of special tokens that are permitted within the + /// tokenization process. + /// + public IList AllowedSpecialTokens { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizer.Serialization.cs new file mode 100644 index 000000000000..a7d70842b71f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizer.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
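// Illustrative sketch: tokenizer parameters that apply when a split skill counts length
// in azureOpenAITokens. 'CL100k_base' is the documented default encoder name; the special
// token below is only an example value.
using Azure.Search.Documents;

var tokenizerParameters = new AzureOpenAITokenizerParameters
{
    EncoderModelName = new SplitSkillEncoderModelName("CL100k_base")
};
tokenizerParameters.AllowedSpecialTokens.Add("<|endofprompt|>"); // permit this token during tokenization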
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureOpenAIVectorizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAIVectorizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Parameters)) + { + writer.WritePropertyName("azureOpenAIParameters"u8); + writer.WriteObjectValue(Parameters, options); + } + } + + AzureOpenAIVectorizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAIVectorizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureOpenAIVectorizer(document.RootElement, options); + } + + internal static AzureOpenAIVectorizer DeserializeAzureOpenAIVectorizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + AzureOpenAIVectorizerParameters azureOpenAIParameters = default; + string name = default; + VectorSearchVectorizerKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("azureOpenAIParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + azureOpenAIParameters = AzureOpenAIVectorizerParameters.DeserializeAzureOpenAIVectorizerParameters(property.Value, options); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchVectorizerKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureOpenAIVectorizer(name, kind, serializedAdditionalRawData, azureOpenAIParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureOpenAIVectorizer)} does not support writing '{options.Format}' format."); + } + } + + AzureOpenAIVectorizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAIVectorizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureOpenAIVectorizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new AzureOpenAIVectorizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAIVectorizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizer.cs new file mode 100644 index 000000000000..51f682a68f06 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizer.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Specifies the Azure OpenAI resource used to vectorize a query string. + public partial class AzureOpenAIVectorizer : VectorSearchVectorizer + { + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// is null. + public AzureOpenAIVectorizer(string vectorizerName) : base(vectorizerName) + { + Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); + + Kind = VectorSearchVectorizerKind.AzureOpenAI; + } + + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// Type of VectorSearchVectorizer. + /// Keeps track of any properties unknown to the library. + /// Contains the parameters specific to Azure OpenAI embedding vectorization. + internal AzureOpenAIVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, IDictionary serializedAdditionalRawData, AzureOpenAIVectorizerParameters parameters) : base(vectorizerName, kind, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal AzureOpenAIVectorizer() + { + } + + /// Contains the parameters specific to Azure OpenAI embedding vectorization. 
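// Illustrative sketch: an Azure OpenAI vectorizer whose Parameters are written under
// "azureOpenAIParameters" by the serializer above. AzureOpenAIVectorizerParameters is
// defined later in this diff; the resource, deployment, and model values are placeholders.
using System;
using Azure.Search.Documents;

var aoaiVectorizer = new AzureOpenAIVectorizer("my-aoai-vectorizer")
{
    Parameters = new AzureOpenAIVectorizerParameters
    {
        ResourceUrl = new Uri("https://my-openai-resource.openai.azure.com"),
        DeploymentName = "my-embedding-deployment",
        ModelName = AzureOpenAIModelName.TextEmbedding3Small
    }
};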
+ public AzureOpenAIVectorizerParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizerParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizerParameters.Serialization.cs new file mode 100644 index 000000000000..0a04194e38d4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizerParameters.Serialization.cs @@ -0,0 +1,207 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class AzureOpenAIVectorizerParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAIVectorizerParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(ResourceUrl)) + { + writer.WritePropertyName("resourceUri"u8); + writer.WriteStringValue(ResourceUrl.AbsoluteUri); + } + if (Optional.IsDefined(DeploymentName)) + { + writer.WritePropertyName("deploymentId"u8); + writer.WriteStringValue(DeploymentName); + } + if (Optional.IsDefined(ApiKey)) + { + writer.WritePropertyName("apiKey"u8); + writer.WriteStringValue(ApiKey); + } + if (Optional.IsDefined(AuthIdentity)) + { + writer.WritePropertyName("authIdentity"u8); + writer.WriteObjectValue(AuthIdentity, options); + } + if (Optional.IsDefined(ModelName)) + { + writer.WritePropertyName("modelName"u8); + writer.WriteStringValue(ModelName.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AzureOpenAIVectorizerParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzureOpenAIVectorizerParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzureOpenAIVectorizerParameters(document.RootElement, options); + } + + internal static AzureOpenAIVectorizerParameters DeserializeAzureOpenAIVectorizerParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri resourceUri = default; + string deploymentId = default; + string apiKey = default; + SearchIndexerDataIdentity authIdentity = default; + AzureOpenAIModelName? modelName = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("resourceUri"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + resourceUri = new Uri(property.Value.GetString()); + continue; + } + if (property.NameEquals("deploymentId"u8)) + { + deploymentId = property.Value.GetString(); + continue; + } + if (property.NameEquals("apiKey"u8)) + { + apiKey = property.Value.GetString(); + continue; + } + if (property.NameEquals("authIdentity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (property.NameEquals("modelName"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + modelName = new AzureOpenAIModelName(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzureOpenAIVectorizerParameters( + resourceUri, + deploymentId, + apiKey, + authIdentity, + modelName, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzureOpenAIVectorizerParameters)} does not support writing '{options.Format}' format."); + } + } + + AzureOpenAIVectorizerParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAIVectorizerParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzureOpenAIVectorizerParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
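// Illustrative sketch: the rawDataDictionary logic above captures properties the library
// does not recognize, so they survive a JSON ("J") read/write round trip. "futureProperty"
// is a made-up field name used only for this example.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

BinaryData input = BinaryData.FromString(
    "{\"deploymentId\":\"my-deployment\",\"futureProperty\":42}");
AzureOpenAIVectorizerParameters parameters =
    ModelReaderWriter.Read<AzureOpenAIVectorizerParameters>(input);
BinaryData output = ModelReaderWriter.Write(parameters); // output still contains "futureProperty"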
+ internal static AzureOpenAIVectorizerParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzureOpenAIVectorizerParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizerParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizerParameters.cs new file mode 100644 index 000000000000..94687cbfa727 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/AzureOpenAIVectorizerParameters.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Specifies the parameters for connecting to the Azure OpenAI resource. + public partial class AzureOpenAIVectorizerParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public AzureOpenAIVectorizerParameters() + { + } + + /// Initializes a new instance of . + /// The resource URI of the Azure OpenAI resource. + /// ID of the Azure OpenAI model deployment on the designated resource. + /// API key of the designated Azure OpenAI resource. + /// + /// The user-assigned managed identity used for outbound connections. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The name of the embedding model that is deployed at the provided deploymentId + /// path. + /// + /// Keeps track of any properties unknown to the library. + internal AzureOpenAIVectorizerParameters(Uri resourceUrl, string deploymentName, string apiKey, SearchIndexerDataIdentity authIdentity, AzureOpenAIModelName? modelName, IDictionary serializedAdditionalRawData) + { + ResourceUrl = resourceUrl; + DeploymentName = deploymentName; + ApiKey = apiKey; + AuthIdentity = authIdentity; + ModelName = modelName; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The resource URI of the Azure OpenAI resource. + public Uri ResourceUrl { get; set; } + /// ID of the Azure OpenAI model deployment on the designated resource. + public string DeploymentName { get; set; } + /// API key of the designated Azure OpenAI resource. 
+ public string ApiKey { get; set; } + /// + /// The user-assigned managed identity used for outbound connections. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity AuthIdentity { get; set; } + /// + /// The name of the embedding model that is deployed at the provided deploymentId + /// path. + /// + public AzureOpenAIModelName? ModelName { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/BM25SimilarityAlgorithm.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/BM25SimilarityAlgorithm.Serialization.cs new file mode 100644 index 000000000000..f4232cc9fec6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/BM25SimilarityAlgorithm.Serialization.cs @@ -0,0 +1,156 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class BM25SimilarityAlgorithm : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BM25SimilarityAlgorithm)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(K1)) + { + writer.WritePropertyName("k1"u8); + writer.WriteNumberValue(K1.Value); + } + if (Optional.IsDefined(B)) + { + writer.WritePropertyName("b"u8); + writer.WriteNumberValue(B.Value); + } + } + + BM25SimilarityAlgorithm IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BM25SimilarityAlgorithm)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeBM25SimilarityAlgorithm(document.RootElement, options); + } + + internal static BM25SimilarityAlgorithm DeserializeBM25SimilarityAlgorithm(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double? k1 = default; + double? 
b = default; + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("k1"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + k1 = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("b"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + b = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new BM25SimilarityAlgorithm(odataType, serializedAdditionalRawData, k1, b); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(BM25SimilarityAlgorithm)} does not support writing '{options.Format}' format."); + } + } + + BM25SimilarityAlgorithm IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeBM25SimilarityAlgorithm(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(BM25SimilarityAlgorithm)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new BM25SimilarityAlgorithm FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeBM25SimilarityAlgorithm(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/BM25SimilarityAlgorithm.cs b/sdk/search/Azure.Search.Documents/src/Generated/BM25SimilarityAlgorithm.cs new file mode 100644 index 000000000000..5beea25f0370 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/BM25SimilarityAlgorithm.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Ranking function based on the Okapi BM25 similarity algorithm. BM25 is a + /// TF-IDF-like algorithm that includes length normalization (controlled by the 'b' + /// parameter) as well as term frequency saturation (controlled by the 'k1' + /// parameter). 
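// Illustrative sketch: tuning the two BM25 parameters described above via the properties
// added below. The values shown match the documented service defaults (k1 = 1.2, b = 0.75);
// setting B = 0 would disable length normalization entirely.
using Azure.Search.Documents;

var bm25 = new BM25SimilarityAlgorithm
{
    K1 = 1.2, // term-frequency saturation
    B = 0.75  // document-length normalization
};
// Typically assigned to an index's similarity setting (e.g. index.Similarity = bm25, assumed).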
+ /// + public partial class BM25SimilarityAlgorithm : SimilarityAlgorithm + { + /// Initializes a new instance of . + public BM25SimilarityAlgorithm() + { + OdataType = "#Microsoft.Azure.Search.BM25Similarity"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + /// + /// This property controls the scaling function between the term frequency of each + /// matching terms and the final relevance score of a document-query pair. By + /// default, a value of 1.2 is used. A value of 0.0 means the score does not scale + /// with an increase in term frequency. + /// + /// + /// This property controls how the length of a document affects the relevance + /// score. By default, a value of 0.75 is used. A value of 0.0 means no length + /// normalization is applied, while a value of 1.0 means the score is fully + /// normalized by the length of the document. + /// + internal BM25SimilarityAlgorithm(string odataType, IDictionary serializedAdditionalRawData, double? k1, double? b) : base(odataType, serializedAdditionalRawData) + { + K1 = k1; + B = b; + } + + /// + /// This property controls the scaling function between the term frequency of each + /// matching terms and the final relevance score of a document-query pair. By + /// default, a value of 1.2 is used. A value of 0.0 means the score does not scale + /// with an increase in term frequency. + /// + public double? K1 { get; set; } + /// + /// This property controls how the length of a document affects the relevance + /// score. By default, a value of 0.75 is used. A value of 0.0 means no length + /// normalization is applied, while a value of 1.0 means the score is fully + /// normalized by the length of the document. + /// + public double? B { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/BinaryQuantizationCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/BinaryQuantizationCompression.Serialization.cs new file mode 100644 index 000000000000..cc035d84c599 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/BinaryQuantizationCompression.Serialization.cs @@ -0,0 +1,179 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class BinaryQuantizationCompression : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BinaryQuantizationCompression)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + BinaryQuantizationCompression IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BinaryQuantizationCompression)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeBinaryQuantizationCompression(document.RootElement, options); + } + + internal static BinaryQuantizationCompression DeserializeBinaryQuantizationCompression(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + bool? rerankWithOriginalVectors = default; + double? defaultOversampling = default; + RescoringOptions rescoringOptions = default; + int? truncationDimension = default; + VectorSearchCompressionKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("rerankWithOriginalVectors"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rerankWithOriginalVectors = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("defaultOversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultOversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("rescoringOptions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rescoringOptions = RescoringOptions.DeserializeRescoringOptions(property.Value, options); + continue; + } + if (property.NameEquals("truncationDimension"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + truncationDimension = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchCompressionKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new BinaryQuantizationCompression( + name, + rerankWithOriginalVectors, + defaultOversampling, + rescoringOptions, + truncationDimension, + kind, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(BinaryQuantizationCompression)} does not support writing '{options.Format}' format."); + } + } + + BinaryQuantizationCompression IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeBinaryQuantizationCompression(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(BinaryQuantizationCompression)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new BinaryQuantizationCompression FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeBinaryQuantizationCompression(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/BinaryQuantizationCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/BinaryQuantizationCompression.cs new file mode 100644 index 000000000000..a077c8a13e92 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/BinaryQuantizationCompression.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains configuration options specific to the binary quantization compression + /// method used during indexing and querying. + /// + public partial class BinaryQuantizationCompression : VectorSearchCompression + { + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// is null. + public BinaryQuantizationCompression(string compressionName) : base(compressionName) + { + Argument.AssertNotNull(compressionName, nameof(compressionName)); + + Kind = VectorSearchCompressionKind.BinaryQuantization; + } + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// + /// If set to true, once the ordered set of results calculated using compressed + /// vectors are obtained, they will be reranked again by recalculating the + /// full-precision similarity scores. This will improve recall at the expense of + /// latency. + /// + /// + /// Default oversampling factor. Oversampling will internally request more + /// documents (specified by this multiplier) in the initial search. This increases + /// the set of results that will be reranked using recomputed similarity scores + /// from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). + /// This parameter can only be set when rerankWithOriginalVectors is true. Higher + /// values improve recall at the expense of latency. + /// + /// Contains the options for rescoring. + /// + /// The number of dimensions to truncate the vectors to. Truncating the vectors + /// reduces the size of the vectors and the amount of data that needs to be + /// transferred during search. This can save storage cost and improve search + /// performance at the expense of recall. 
It should be only used for embeddings + /// trained with Matryoshka Representation Learning (MRL) such as OpenAI + /// text-embedding-3-large (small). The default value is null, which means no + /// truncation. + /// + /// Type of VectorSearchCompression. + /// Keeps track of any properties unknown to the library. + internal BinaryQuantizationCompression(string compressionName, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? truncationDimension, VectorSearchCompressionKind kind, IDictionary serializedAdditionalRawData) : base(compressionName, rerankWithOriginalVectors, defaultOversampling, rescoringOptions, truncationDimension, kind, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal BinaryQuantizationCompression() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerDataToExtract.cs b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerDataToExtract.cs similarity index 81% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerDataToExtract.cs rename to sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerDataToExtract.cs index 73dda0f1f12f..208deeec6329 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerDataToExtract.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerDataToExtract.cs @@ -8,9 +8,14 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { - /// Specifies the data to extract from Azure blob storage and tells the indexer which data to extract from image content when "imageAction" is set to a value other than "none". This applies to embedded image content in a .PDF or other application, or image files such as .jpg and .png, in Azure blobs. + /// + /// Specifies the data to extract from Azure blob storage and tells the indexer + /// which data to extract from image content when "imageAction" is set to a value + /// other than "none". This applies to embedded image content in a .PDF or other + /// application, or image files such as .jpg and .png, in Azure blobs. + /// public readonly partial struct BlobIndexerDataToExtract : IEquatable { private readonly string _value; @@ -28,7 +33,11 @@ public BlobIndexerDataToExtract(string value) /// Indexes just the standard blob properties and user-specified metadata. public static BlobIndexerDataToExtract StorageMetadata { get; } = new BlobIndexerDataToExtract(StorageMetadataValue); - /// Extracts metadata provided by the Azure blob storage subsystem and the content-type specific metadata (for example, metadata unique to just .png files are indexed). + /// + /// Extracts metadata provided by the Azure blob storage subsystem and the + /// content-type specific metadata (for example, metadata unique to just .png files + /// are indexed). + /// public static BlobIndexerDataToExtract AllMetadata { get; } = new BlobIndexerDataToExtract(AllMetadataValue); /// Extracts all metadata and textual content from each blob. 
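// Example (not part of the generated diff): a hypothetical sketch of wiring the
// BinaryQuantizationCompression model above into a vector search configuration. The
// VectorSearch type, its Compressions collection, and the base-class properties used here are
// assumed from the existing index-definition surface; names and values are illustrative.
var vectorSearch = new VectorSearch();
vectorSearch.Compressions.Add(new BinaryQuantizationCompression("bq-compression")
{
    RerankWithOriginalVectors = true, // rescore candidates with full-precision vectors
    DefaultOversampling = 4,          // request 4x candidates before rescoring
    TruncationDimension = 1024        // only meaningful for MRL-trained embeddings
});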
public static BlobIndexerDataToExtract ContentAndMetadata { get; } = new BlobIndexerDataToExtract(ContentAndMetadataValue); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerImageAction.cs b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerImageAction.cs new file mode 100644 index 000000000000..a7b4610d62cc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerImageAction.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// Determines how to process embedded images and image files in Azure blob + /// storage. Setting the "imageAction" configuration to any value other than + /// "none" requires that a skillset also be attached to that indexer. + /// + public readonly partial struct BlobIndexerImageAction : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public BlobIndexerImageAction(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NoneValue = "none"; + private const string GenerateNormalizedImagesValue = "generateNormalizedImages"; + private const string GenerateNormalizedImagePerPageValue = "generateNormalizedImagePerPage"; + + /// Ignores embedded images or image files in the data set. This is the default. + public static BlobIndexerImageAction None { get; } = new BlobIndexerImageAction(NoneValue); + /// + /// Extracts text from images (for example, the word "STOP" from a traffic stop + /// sign), and embeds it into the content field. This action requires that + /// "dataToExtract" is set to "contentAndMetadata". A normalized image refers to + /// additional processing resulting in uniform image output, sized and rotated to + /// promote consistent rendering when you include images in visual search results. + /// This information is generated for each image when you use this option. + /// + public static BlobIndexerImageAction GenerateNormalizedImages { get; } = new BlobIndexerImageAction(GenerateNormalizedImagesValue); + /// + /// Extracts text from images (for example, the word "STOP" from a traffic stop + /// sign), and embeds it into the content field, but treats PDF files differently + /// in that each page will be rendered as an image and normalized accordingly, + /// instead of extracting embedded images. Non-PDF file types will be treated the + /// same as if "generateNormalizedImages" was set. + /// + public static BlobIndexerImageAction GenerateNormalizedImagePerPage { get; } = new BlobIndexerImageAction(GenerateNormalizedImagePerPageValue); + /// Determines if two values are the same. + public static bool operator ==(BlobIndexerImageAction left, BlobIndexerImageAction right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(BlobIndexerImageAction left, BlobIndexerImageAction right) => !left.Equals(right); + /// Converts a to a . 
+ public static implicit operator BlobIndexerImageAction(string value) => new BlobIndexerImageAction(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is BlobIndexerImageAction other && Equals(other); + /// + public bool Equals(BlobIndexerImageAction other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerParsingMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerParsingMode.cs similarity index 91% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerParsingMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerParsingMode.cs index a7a0e11f98aa..53a9c7889241 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerParsingMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerParsingMode.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Represents the parsing mode for indexing from an Azure blob data source. public readonly partial struct BlobIndexerParsingMode : IEquatable @@ -38,9 +38,15 @@ public BlobIndexerParsingMode(string value) public static BlobIndexerParsingMode DelimitedText { get; } = new BlobIndexerParsingMode(DelimitedTextValue); /// Set to json to extract structured content from JSON files. public static BlobIndexerParsingMode Json { get; } = new BlobIndexerParsingMode(JsonValue); - /// Set to jsonArray to extract individual elements of a JSON array as separate documents. + /// + /// Set to jsonArray to extract individual elements of a JSON array as separate + /// documents. + /// public static BlobIndexerParsingMode JsonArray { get; } = new BlobIndexerParsingMode(JsonArrayValue); - /// Set to jsonLines to extract individual JSON entities, separated by a new line, as separate documents. + /// + /// Set to jsonLines to extract individual JSON entities, separated by a new line, + /// as separate documents. + /// public static BlobIndexerParsingMode JsonLines { get; } = new BlobIndexerParsingMode(JsonLinesValue); /// Set to markdown to extract content from markdown files. public static BlobIndexerParsingMode Markdown { get; } = new BlobIndexerParsingMode(MarkdownValue); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerPdfTextRotationAlgorithm.cs b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerPdfTextRotationAlgorithm.cs new file mode 100644 index 000000000000..8e3307d8c8d3 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/BlobIndexerPdfTextRotationAlgorithm.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Determines algorithm for text extraction from PDF files in Azure blob storage. + public readonly partial struct BlobIndexerPdfTextRotationAlgorithm : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. 
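// Example (not part of the generated diff): a hypothetical sketch showing how the blob indexer
// enums above are typically applied through an indexer's parameter configuration. The
// IndexingParametersConfiguration type and its property names are assumed from the existing
// SDK surface rather than shown in this diff; note that any imageAction other than "none"
// also requires a skillset on the indexer.
var configuration = new IndexingParametersConfiguration
{
    ParsingMode = BlobIndexerParsingMode.Default,
    DataToExtract = BlobIndexerDataToExtract.ContentAndMetadata,
    ImageAction = BlobIndexerImageAction.GenerateNormalizedImages,
    PdfTextRotationAlgorithm = BlobIndexerPdfTextRotationAlgorithm.DetectAngles
};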
+ public BlobIndexerPdfTextRotationAlgorithm(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NoneValue = "none"; + private const string DetectAnglesValue = "detectAngles"; + + /// Leverages normal text extraction. This is the default. + public static Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm None { get; } = new Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm(NoneValue); + /// + /// May produce better and more readable text extraction from PDF files that have + /// rotated text within them. Note that there may be a small performance speed + /// impact when this parameter is used. This parameter only applies to PDF files, + /// and only to PDFs with embedded text. If the rotated text appears within an + /// embedded image in the PDF, this parameter does not apply. + /// + public static Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm DetectAngles { get; } = new Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm(DetectAnglesValue); + /// Determines if two values are the same. + public static bool operator ==(Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm left, Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm left, Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm(string value) => new Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm other && Equals(other); + /// + public bool Equals(Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CharFilter.Serialization.cs new file mode 100644 index 000000000000..f871a1b27568 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CharFilter.Serialization.cs @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownCharFilter))] + public partial class CharFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CharFilter)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + CharFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CharFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCharFilter(document.RootElement, options); + } + + internal static CharFilter DeserializeCharFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.MappingCharFilter": return MappingCharFilter.DeserializeMappingCharFilter(element, options); + case "#Microsoft.Azure.Search.PatternReplaceCharFilter": return PatternReplaceCharFilter.DeserializePatternReplaceCharFilter(element, options); + } + } + return UnknownCharFilter.DeserializeUnknownCharFilter(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CharFilter)} does not support writing '{options.Format}' format."); + } + } + + CharFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCharFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CharFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static CharFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCharFilter(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/CharFilter.cs new file mode 100644 index 000000000000..2f467a4c830a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CharFilter.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for character filters. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public abstract partial class CharFilter + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + protected CharFilter(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal CharFilter(string odataType, string name, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + Name = name; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CharFilter() + { + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. 
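// Example (not part of the generated diff): a hypothetical sketch of registering a concrete
// char filter derived from the CharFilter base type above and referencing it from a custom
// analyzer. MappingCharFilter, CustomAnalyzer, and the SearchIndex collections used here are
// assumed from the existing SDK surface; the names and mapping rule are illustrative.
var index = new SearchIndex("products");
index.CharFilters.Add(new MappingCharFilter("dash_to_underscore", new[] { "-=>_" }));
index.Analyzers.Add(new CustomAnalyzer("product_analyzer", LexicalTokenizerName.Standard)
{
    CharFilters = { "dash_to_underscore" } // CharFilterName converts implicitly from string
});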
+ /// + public string Name { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilterName.cs b/sdk/search/Azure.Search.Documents/src/Generated/CharFilterName.cs similarity index 88% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilterName.cs rename to sdk/search/Azure.Search.Documents/src/Generated/CharFilterName.cs index 09e8bc50231b..9a0c7e663bcd 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilterName.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/CharFilterName.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Defines the names of all character filters supported by the search engine. public readonly partial struct CharFilterName : IEquatable @@ -24,7 +24,10 @@ public CharFilterName(string value) private const string HtmlStripValue = "html_strip"; - /// A character filter that attempts to strip out HTML constructs. See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.html. + /// + /// A character filter that attempts to strip out HTML constructs. See + /// https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.html + /// public static CharFilterName HtmlStrip { get; } = new CharFilterName(HtmlStripValue); /// Determines if two values are the same. public static bool operator ==(CharFilterName left, CharFilterName right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilter.Serialization.cs new file mode 100644 index 000000000000..b080566d5717 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilter.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CjkBigramTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CjkBigramTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsCollectionDefined(IgnoreScripts)) + { + writer.WritePropertyName("ignoreScripts"u8); + writer.WriteStartArray(); + foreach (var item in IgnoreScripts) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(OutputUnigrams)) + { + writer.WritePropertyName("outputUnigrams"u8); + writer.WriteBooleanValue(OutputUnigrams.Value); + } + } + + CjkBigramTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CjkBigramTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCjkBigramTokenFilter(document.RootElement, options); + } + + internal static CjkBigramTokenFilter DeserializeCjkBigramTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList ignoreScripts = default; + bool? outputUnigrams = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("ignoreScripts"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new CjkBigramTokenFilterScripts(item.GetString())); + } + ignoreScripts = array; + continue; + } + if (property.NameEquals("outputUnigrams"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + outputUnigrams = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CjkBigramTokenFilter(odataType, name, serializedAdditionalRawData, ignoreScripts ?? new ChangeTrackingList(), outputUnigrams); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CjkBigramTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + CjkBigramTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCjkBigramTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CjkBigramTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new CjkBigramTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCjkBigramTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilter.cs new file mode 100644 index 000000000000..0aa83b68af43 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilter.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Forms bigrams of CJK terms that are generated from the standard tokenizer. This + /// token filter is implemented using Apache Lucene. + /// + public partial class CjkBigramTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public CjkBigramTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.CjkBigramTokenFilter"; + IgnoreScripts = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The scripts to ignore. + /// + /// A value indicating whether to output both unigrams and bigrams (if true), or + /// just bigrams (if false). Default is false. + /// + internal CjkBigramTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList ignoreScripts, bool? outputUnigrams) : base(odataType, name, serializedAdditionalRawData) + { + IgnoreScripts = ignoreScripts; + OutputUnigrams = outputUnigrams; + } + + /// Initializes a new instance of for deserialization. + internal CjkBigramTokenFilter() + { + } + + /// The scripts to ignore. + public IList IgnoreScripts { get; } + /// + /// A value indicating whether to output both unigrams and bigrams (if true), or + /// just bigrams (if false). Default is false. + /// + public bool? 
OutputUnigrams { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilterScripts.cs b/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilterScripts.cs new file mode 100644 index 000000000000..c49ea6613714 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CjkBigramTokenFilterScripts.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Scripts that can be ignored by CjkBigramTokenFilter. + public readonly partial struct CjkBigramTokenFilterScripts : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public CjkBigramTokenFilterScripts(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string HanValue = "han"; + private const string HiraganaValue = "hiragana"; + private const string KatakanaValue = "katakana"; + private const string HangulValue = "hangul"; + + /// Ignore Han script when forming bigrams of CJK terms. + public static CjkBigramTokenFilterScripts Han { get; } = new CjkBigramTokenFilterScripts(HanValue); + /// Ignore Hiragana script when forming bigrams of CJK terms. + public static CjkBigramTokenFilterScripts Hiragana { get; } = new CjkBigramTokenFilterScripts(HiraganaValue); + /// Ignore Katakana script when forming bigrams of CJK terms. + public static CjkBigramTokenFilterScripts Katakana { get; } = new CjkBigramTokenFilterScripts(KatakanaValue); + /// Ignore Hangul script when forming bigrams of CJK terms. + public static CjkBigramTokenFilterScripts Hangul { get; } = new CjkBigramTokenFilterScripts(HangulValue); + /// Determines if two values are the same. + public static bool operator ==(CjkBigramTokenFilterScripts left, CjkBigramTokenFilterScripts right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(CjkBigramTokenFilterScripts left, CjkBigramTokenFilterScripts right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator CjkBigramTokenFilterScripts(string value) => new CjkBigramTokenFilterScripts(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is CjkBigramTokenFilterScripts other && Equals(other); + /// + public bool Equals(CjkBigramTokenFilterScripts other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ClassicSimilarityAlgorithm.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ClassicSimilarityAlgorithm.Serialization.cs new file mode 100644 index 000000000000..737b1da202c5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ClassicSimilarityAlgorithm.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
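// Example (not part of the generated diff): a hypothetical sketch of configuring the
// CjkBigramTokenFilter defined above, ignoring the kana scripts and emitting unigrams
// alongside bigrams. The SearchIndex.TokenFilters collection is assumed from the existing
// SDK surface; names are illustrative.
var cjkFilter = new CjkBigramTokenFilter("cjk_bigrams_no_kana")
{
    OutputUnigrams = true // emit single terms in addition to bigrams
};
cjkFilter.IgnoreScripts.Add(CjkBigramTokenFilterScripts.Hiragana);
cjkFilter.IgnoreScripts.Add(CjkBigramTokenFilterScripts.Katakana);

var index = new SearchIndex("docs-ja");
index.TokenFilters.Add(cjkFilter);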
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ClassicSimilarityAlgorithm : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ClassicSimilarityAlgorithm)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + ClassicSimilarityAlgorithm IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ClassicSimilarityAlgorithm)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeClassicSimilarityAlgorithm(document.RootElement, options); + } + + internal static ClassicSimilarityAlgorithm DeserializeClassicSimilarityAlgorithm(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ClassicSimilarityAlgorithm(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ClassicSimilarityAlgorithm)} does not support writing '{options.Format}' format."); + } + } + + ClassicSimilarityAlgorithm IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeClassicSimilarityAlgorithm(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ClassicSimilarityAlgorithm)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ClassicSimilarityAlgorithm FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeClassicSimilarityAlgorithm(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ClassicSimilarityAlgorithm.cs b/sdk/search/Azure.Search.Documents/src/Generated/ClassicSimilarityAlgorithm.cs new file mode 100644 index 000000000000..b9f6f0c7a94f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ClassicSimilarityAlgorithm.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Legacy similarity algorithm which uses the Lucene TFIDFSimilarity + /// implementation of TF-IDF. This variation of TF-IDF introduces static document + /// length normalization as well as coordinating factors that penalize documents + /// that only partially match the searched queries. + /// + public partial class ClassicSimilarityAlgorithm : SimilarityAlgorithm + { + /// Initializes a new instance of . + public ClassicSimilarityAlgorithm() + { + OdataType = "#Microsoft.Azure.Search.ClassicSimilarity"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal ClassicSimilarityAlgorithm(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ClassicTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ClassicTokenizer.Serialization.cs new file mode 100644 index 000000000000..781bb3a9ac8b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ClassicTokenizer.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
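// Example (not part of the generated diff): a hypothetical sketch of opting an index into the
// legacy ClassicSimilarityAlgorithm above instead of the BM25 default, and pairing it with
// the ClassicTokenizer defined later in this diff (its MaxTokenLength defaults to 255).
// SearchIndex and its collections are assumed from the existing SDK surface; names and values
// are illustrative.
var index = new SearchIndex("legacy-scoring")
{
    Similarity = new ClassicSimilarityAlgorithm()
};
index.Tokenizers.Add(new ClassicTokenizer("classic_300")
{
    MaxTokenLength = 300 // the documented maximum
});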
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ClassicTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ClassicTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + } + + ClassicTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ClassicTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeClassicTokenizer(document.RootElement, options); + } + + internal static ClassicTokenizer DeserializeClassicTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxTokenLength = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ClassicTokenizer(odataType, name, serializedAdditionalRawData, maxTokenLength); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ClassicTokenizer)} does not support writing '{options.Format}' format."); + } + } + + ClassicTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeClassicTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ClassicTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ClassicTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeClassicTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ClassicTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/ClassicTokenizer.cs new file mode 100644 index 000000000000..49cc37140f59 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ClassicTokenizer.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Grammar-based tokenizer that is suitable for processing most European-language + /// documents. This tokenizer is implemented using Apache Lucene. + /// + public partial class ClassicTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public ClassicTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.ClassicTokenizer"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The maximum token length. Default is 255. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + internal ClassicTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenLength) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenLength = maxTokenLength; + } + + /// Initializes a new instance of for deserialization. + internal ClassicTokenizer() + { + } + + /// + /// The maximum token length. Default is 255. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + public int? 
MaxTokenLength { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccount.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccount.Serialization.cs new file mode 100644 index 000000000000..e14f3f1e2975 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccount.Serialization.cs @@ -0,0 +1,141 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownCognitiveServicesAccount))] + public partial class CognitiveServicesAccount : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + CognitiveServicesAccount IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCognitiveServicesAccount(document.RootElement, options); + } + + internal static CognitiveServicesAccount DeserializeCognitiveServicesAccount(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.AIServicesByIdentity": return AIServicesAccountIdentity.DeserializeAIServicesAccountIdentity(element, options); + case "#Microsoft.Azure.Search.AIServicesByKey": return AIServicesAccountKey.DeserializeAIServicesAccountKey(element, options); + case "#Microsoft.Azure.Search.CognitiveServicesByKey": return CognitiveServicesAccountKey.DeserializeCognitiveServicesAccountKey(element, options); + case "#Microsoft.Azure.Search.DefaultCognitiveServices": return DefaultCognitiveServicesAccount.DeserializeDefaultCognitiveServicesAccount(element, options); + } + } + return UnknownCognitiveServicesAccount.DeserializeUnknownCognitiveServicesAccount(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support writing '{options.Format}' format."); + } + } + + CognitiveServicesAccount IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCognitiveServicesAccount(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CognitiveServicesAccount FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCognitiveServicesAccount(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccount.cs b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccount.cs new file mode 100644 index 000000000000..81c6fbbebf3d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccount.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for describing any Azure AI service resource attached to a skillset. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public abstract partial class CognitiveServicesAccount + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + protected CognitiveServicesAccount() + { + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Description of the Azure AI service resource attached to a skillset. + /// Keeps track of any properties unknown to the library. + internal CognitiveServicesAccount(string odataType, string description, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + Description = description; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + /// Description of the Azure AI service resource attached to a skillset. + public string Description { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccountKey.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccountKey.Serialization.cs new file mode 100644 index 000000000000..603eb307c79e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccountKey.Serialization.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
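// Hand-written illustration (not generator output): a minimal sketch of how the
// "@odata.type" discriminator handled by DeserializeCognitiveServicesAccount above,
// together with the [PersistableModelProxy(typeof(UnknownCognitiveServicesAccount))]
// attribute, lets ModelReaderWriter rehydrate the abstract base type as the matching
// derived class. The JSON payload and key value below are placeholders only.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

BinaryData payload = BinaryData.FromString(
    "{\"@odata.type\":\"#Microsoft.Azure.Search.CognitiveServicesByKey\",\"key\":\"<api-key>\"}");

// Reading the abstract base goes through the proxy and the discriminator switch.
CognitiveServicesAccount account = ModelReaderWriter.Read<CognitiveServicesAccount>(
    payload, new ModelReaderWriterOptions("J"));

Console.WriteLine(account is CognitiveServicesAccountKey); // True: the discriminator selected the key-based type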
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CognitiveServicesAccountKey : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CognitiveServicesAccountKey)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("key"u8); + writer.WriteStringValue(Key); + } + + CognitiveServicesAccountKey IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CognitiveServicesAccountKey)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCognitiveServicesAccountKey(document.RootElement, options); + } + + internal static CognitiveServicesAccountKey DeserializeCognitiveServicesAccountKey(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string key = default; + string odataType = default; + string description = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CognitiveServicesAccountKey(odataType, description, serializedAdditionalRawData, key); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CognitiveServicesAccountKey)} does not support writing '{options.Format}' format."); + } + } + + CognitiveServicesAccountKey IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCognitiveServicesAccountKey(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CognitiveServicesAccountKey)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new CognitiveServicesAccountKey FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCognitiveServicesAccountKey(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccountKey.cs b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccountKey.cs new file mode 100644 index 000000000000..d146d9286d03 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CognitiveServicesAccountKey.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The multi-region account key of an Azure AI service resource that's attached to + /// a skillset. + /// + public partial class CognitiveServicesAccountKey : CognitiveServicesAccount + { + /// Initializes a new instance of . + /// The key used to provision the Azure AI service resource attached to a skillset. + /// is null. + public CognitiveServicesAccountKey(string key) + { + Argument.AssertNotNull(key, nameof(key)); + + OdataType = "#Microsoft.Azure.Search.CognitiveServicesByKey"; + Key = key; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Description of the Azure AI service resource attached to a skillset. + /// Keeps track of any properties unknown to the library. + /// The key used to provision the Azure AI service resource attached to a skillset. + internal CognitiveServicesAccountKey(string odataType, string description, IDictionary serializedAdditionalRawData, string key) : base(odataType, description, serializedAdditionalRawData) + { + Key = key; + } + + /// Initializes a new instance of for deserialization. + internal CognitiveServicesAccountKey() + { + } + + /// The key used to provision the Azure AI service resource attached to a skillset. + public string Key { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CommonGramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CommonGramTokenFilter.Serialization.cs new file mode 100644 index 000000000000..74717f2507e4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CommonGramTokenFilter.Serialization.cs @@ -0,0 +1,186 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
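// Hand-written usage sketch for the key-based account defined in CognitiveServicesAccountKey.cs
// above; the multi-region key string is a placeholder and Description is optional.
using Azure.Search.Documents;

CognitiveServicesAccount account = new CognitiveServicesAccountKey("<multi-region-api-key>")
{
    Description = "Azure AI services resource attached to the skillset"
};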
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CommonGramTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CommonGramTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("commonWords"u8); + writer.WriteStartArray(); + foreach (var item in CommonWords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(IgnoreCase)) + { + writer.WritePropertyName("ignoreCase"u8); + writer.WriteBooleanValue(IgnoreCase.Value); + } + if (Optional.IsDefined(UseQueryMode)) + { + writer.WritePropertyName("queryMode"u8); + writer.WriteBooleanValue(UseQueryMode.Value); + } + } + + CommonGramTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CommonGramTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCommonGramTokenFilter(document.RootElement, options); + } + + internal static CommonGramTokenFilter DeserializeCommonGramTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList commonWords = default; + bool? ignoreCase = default; + bool? 
queryMode = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("commonWords"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + commonWords = array; + continue; + } + if (property.NameEquals("ignoreCase"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + ignoreCase = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("queryMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + queryMode = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CommonGramTokenFilter( + odataType, + name, + serializedAdditionalRawData, + commonWords, + ignoreCase, + queryMode); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CommonGramTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + CommonGramTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCommonGramTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CommonGramTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new CommonGramTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCommonGramTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CommonGramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/CommonGramTokenFilter.cs new file mode 100644 index 000000000000..7f0c3209d00d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CommonGramTokenFilter.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
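// Hand-written sketch of the IPersistableModel contract that every model in this change
// implements: only the JSON ("J") format is accepted, and any other format string reaches
// the FormatException branch shown in these files. CognitiveServicesAccountKey is used
// here only as a convenient example; the key is a placeholder.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

IPersistableModel<CognitiveServicesAccountKey> model = new CognitiveServicesAccountKey("<api-key>");
BinaryData json = model.Write(new ModelReaderWriterOptions("J"));  // serializes via ModelReaderWriter.Write
// model.Write(new ModelReaderWriterOptions("X"));                 // would throw FormatException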
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Construct bigrams for frequently occurring terms while indexing. Single terms + /// are still indexed too, with bigrams overlaid. This token filter is implemented + /// using Apache Lucene. + /// + public partial class CommonGramTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// The set of common words. + /// or is null. + public CommonGramTokenFilter(string name, IEnumerable commonWords) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(commonWords, nameof(commonWords)); + + OdataType = "#Microsoft.Azure.Search.CommonGramTokenFilter"; + CommonWords = commonWords.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The set of common words. + /// + /// A value indicating whether common words matching will be case insensitive. + /// Default is false. + /// + /// + /// A value that indicates whether the token filter is in query mode. When in query + /// mode, the token filter generates bigrams and then removes common words and + /// single terms followed by a common word. Default is false. + /// + internal CommonGramTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList commonWords, bool? ignoreCase, bool? useQueryMode) : base(odataType, name, serializedAdditionalRawData) + { + CommonWords = commonWords; + IgnoreCase = ignoreCase; + UseQueryMode = useQueryMode; + } + + /// Initializes a new instance of for deserialization. + internal CommonGramTokenFilter() + { + } + + /// The set of common words. + public IList CommonWords { get; } + /// + /// A value indicating whether common words matching will be case insensitive. + /// Default is false. + /// + public bool? IgnoreCase { get; set; } + /// + /// A value that indicates whether the token filter is in query mode. When in query + /// mode, the token filter generates bigrams and then removes common words and + /// single terms followed by a common word. Default is false. + /// + public bool? UseQueryMode { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ConditionalSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ConditionalSkill.Serialization.cs new file mode 100644 index 000000000000..ee23c8d805ed --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ConditionalSkill.Serialization.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
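// Hand-written usage sketch for the CommonGramTokenFilter defined in CommonGramTokenFilter.cs
// above; the filter name and common-word list are illustrative values only.
using Azure.Search.Documents;

var commonGrams = new CommonGramTokenFilter("my-common-grams", new[] { "the", "a", "an", "and" })
{
    IgnoreCase = true,   // match common words case-insensitively (default is false)
    UseQueryMode = true  // query mode: emit bigrams, then drop common words and single terms followed by one
};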
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ConditionalSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ConditionalSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + ConditionalSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ConditionalSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeConditionalSkill(document.RootElement, options); + } + + internal static ConditionalSkill DeserializeConditionalSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ConditionalSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ConditionalSkill)} does not support writing '{options.Format}' format."); + } + } + + ConditionalSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeConditionalSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ConditionalSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ConditionalSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeConditionalSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ConditionalSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/ConditionalSkill.cs new file mode 100644 index 000000000000..1047719eddae --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ConditionalSkill.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A skill that enables scenarios that require a Boolean operation to determine + /// the data to assign to an output. + /// + public partial class ConditionalSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public ConditionalSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Util.ConditionalSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. 
+ /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + internal ConditionalSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal ConditionalSkill() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CorsOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CorsOptions.Serialization.cs new file mode 100644 index 000000000000..847eb83cd385 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CorsOptions.Serialization.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CorsOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CorsOptions)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("allowedOrigins"u8); + writer.WriteStartArray(); + foreach (var item in AllowedOrigins) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(MaxAgeInSeconds)) + { + writer.WritePropertyName("maxAgeInSeconds"u8); + writer.WriteNumberValue(MaxAgeInSeconds.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + CorsOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CorsOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCorsOptions(document.RootElement, options); + } + + internal static CorsOptions DeserializeCorsOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList allowedOrigins = default; + long? maxAgeInSeconds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("allowedOrigins"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + allowedOrigins = array; + continue; + } + if (property.NameEquals("maxAgeInSeconds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxAgeInSeconds = property.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CorsOptions(allowedOrigins, maxAgeInSeconds, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CorsOptions)} does not support writing '{options.Format}' format."); + } + } + + CorsOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCorsOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CorsOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CorsOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCorsOptions(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CorsOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/CorsOptions.cs new file mode 100644 index 000000000000..41baff05f527 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CorsOptions.cs @@ -0,0 +1,101 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
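// Hand-written usage sketch for CorsOptions, whose serialization appears above and whose
// model follows; the allowed origin and cache age are example values only.
using Azure.Search.Documents;

var cors = new CorsOptions(new[] { "https://www.contoso.com" })
{
    MaxAgeInSeconds = 300  // browsers may cache the CORS preflight response for up to 5 minutes
};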
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// Defines options to control Cross-Origin Resource Sharing (CORS) for an index. + public partial class CorsOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The list of origins from which JavaScript code will be granted access to your + /// index. Can contain a list of hosts of the form + /// {protocol}://{fully-qualified-domain-name}[:{port#}], or a single '*' to allow + /// all origins (not recommended). + /// + /// is null. + public CorsOptions(IEnumerable allowedOrigins) + { + Argument.AssertNotNull(allowedOrigins, nameof(allowedOrigins)); + + AllowedOrigins = allowedOrigins.ToList(); + } + + /// Initializes a new instance of . + /// + /// The list of origins from which JavaScript code will be granted access to your + /// index. Can contain a list of hosts of the form + /// {protocol}://{fully-qualified-domain-name}[:{port#}], or a single '*' to allow + /// all origins (not recommended). + /// + /// + /// The duration for which browsers should cache CORS preflight responses. Defaults + /// to 5 minutes. + /// + /// Keeps track of any properties unknown to the library. + internal CorsOptions(IList allowedOrigins, long? maxAgeInSeconds, IDictionary serializedAdditionalRawData) + { + AllowedOrigins = allowedOrigins; + MaxAgeInSeconds = maxAgeInSeconds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CorsOptions() + { + } + + /// + /// The list of origins from which JavaScript code will be granted access to your + /// index. Can contain a list of hosts of the form + /// {protocol}://{fully-qualified-domain-name}[:{port#}], or a single '*' to allow + /// all origins (not recommended). + /// + public IList AllowedOrigins { get; } + /// + /// The duration for which browsers should cache CORS preflight responses. Defaults + /// to 5 minutes. + /// + public long? MaxAgeInSeconds { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomAnalyzer.Serialization.cs new file mode 100644 index 000000000000..605c7c7b53f5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomAnalyzer.Serialization.cs @@ -0,0 +1,196 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
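// Hand-written usage sketch for the CustomAnalyzer whose serialization and model follow.
// The analyzer name is illustrative, and the snippet assumes the existing well-known
// LexicalTokenizerName.Standard, TokenFilterName.Lowercase, TokenFilterName.AsciiFolding,
// and CharFilterName.HtmlStrip values remain available on these extensible enums.
using Azure.Search.Documents;

var analyzer = new CustomAnalyzer("my-custom-analyzer", LexicalTokenizerName.Standard);
analyzer.TokenFilters.Add(TokenFilterName.Lowercase);
analyzer.TokenFilters.Add(TokenFilterName.AsciiFolding);  // token filters run in the listed order
analyzer.CharFilters.Add(CharFilterName.HtmlStrip);       // character filters run before the tokenizer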
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CustomAnalyzer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomAnalyzer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("tokenizer"u8); + writer.WriteStringValue(Tokenizer.ToString()); + if (Optional.IsCollectionDefined(TokenFilters)) + { + writer.WritePropertyName("tokenFilters"u8); + writer.WriteStartArray(); + foreach (var item in TokenFilters) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(CharFilters)) + { + writer.WritePropertyName("charFilters"u8); + writer.WriteStartArray(); + foreach (var item in CharFilters) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + } + + CustomAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomAnalyzer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCustomAnalyzer(document.RootElement, options); + } + + internal static CustomAnalyzer DeserializeCustomAnalyzer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + LexicalTokenizerName tokenizer = default; + IList tokenFilters = default; + IList charFilters = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tokenizer"u8)) + { + tokenizer = new LexicalTokenizerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("tokenFilters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new TokenFilterName(item.GetString())); + } + tokenFilters = array; + continue; + } + if (property.NameEquals("charFilters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new CharFilterName(item.GetString())); + } + charFilters = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CustomAnalyzer( + odataType, + name, + serializedAdditionalRawData, + tokenizer, + tokenFilters ?? new ChangeTrackingList(), + charFilters ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CustomAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + CustomAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CustomAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static new CustomAnalyzer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomAnalyzer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomAnalyzer.cs new file mode 100644 index 000000000000..1f877f94a408 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomAnalyzer.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Allows you to take control over the process of converting text into + /// indexable/searchable tokens. It's a user-defined configuration consisting of a + /// single predefined tokenizer and one or more filters. The tokenizer is + /// responsible for breaking text into tokens, and the filters for modifying tokens + /// emitted by the tokenizer. + /// + public partial class CustomAnalyzer : LexicalAnalyzer + { + /// Initializes a new instance of . + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// + /// The name of the tokenizer to use to divide continuous text into a sequence of + /// tokens, such as breaking a sentence into words. + /// + /// is null. + public CustomAnalyzer(string name, LexicalTokenizerName tokenizer) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.CustomAnalyzer"; + Tokenizer = tokenizer; + TokenFilters = new ChangeTrackingList(); + CharFilters = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The name of the tokenizer to use to divide continuous text into a sequence of + /// tokens, such as breaking a sentence into words. + /// + /// + /// A list of token filters used to filter out or modify the tokens generated by a + /// tokenizer. For example, you can specify a lowercase filter that converts all + /// characters to lowercase. The filters are run in the order in which they are + /// listed. + /// + /// + /// A list of character filters used to prepare input text before it is processed + /// by the tokenizer. For instance, they can replace certain characters or symbols. + /// The filters are run in the order in which they are listed. 
+ /// + internal CustomAnalyzer(string odataType, string name, IDictionary serializedAdditionalRawData, LexicalTokenizerName tokenizer, IList tokenFilters, IList charFilters) : base(odataType, name, serializedAdditionalRawData) + { + Tokenizer = tokenizer; + TokenFilters = tokenFilters; + CharFilters = charFilters; + } + + /// Initializes a new instance of for deserialization. + internal CustomAnalyzer() + { + } + + /// + /// The name of the tokenizer to use to divide continuous text into a sequence of + /// tokens, such as breaking a sentence into words. + /// + public LexicalTokenizerName Tokenizer { get; set; } + /// + /// A list of token filters used to filter out or modify the tokens generated by a + /// tokenizer. For example, you can specify a lowercase filter that converts all + /// characters to lowercase. The filters are run in the order in which they are + /// listed. + /// + public IList TokenFilters { get; } + /// + /// A list of character filters used to prepare input text before it is processed + /// by the tokenizer. For instance, they can replace certain characters or symbols. + /// The filters are run in the order in which they are listed. + /// + public IList CharFilters { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomEntity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntity.Serialization.cs new file mode 100644 index 000000000000..d79cf5f4ddf0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntity.Serialization.cs @@ -0,0 +1,314 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CustomEntity : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomEntity)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsDefined(Type)) + { + writer.WritePropertyName("type"u8); + writer.WriteStringValue(Type); + } + if (Optional.IsDefined(Subtype)) + { + writer.WritePropertyName("subtype"u8); + writer.WriteStringValue(Subtype); + } + if (Optional.IsDefined(Id)) + { + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + } + if (Optional.IsDefined(CaseSensitive)) + { + writer.WritePropertyName("caseSensitive"u8); + writer.WriteBooleanValue(CaseSensitive.Value); + } + if (Optional.IsDefined(AccentSensitive)) + { + writer.WritePropertyName("accentSensitive"u8); + writer.WriteBooleanValue(AccentSensitive.Value); + } + if (Optional.IsDefined(FuzzyEditDistance)) + { + writer.WritePropertyName("fuzzyEditDistance"u8); + writer.WriteNumberValue(FuzzyEditDistance.Value); + } + if (Optional.IsDefined(DefaultCaseSensitive)) + { + writer.WritePropertyName("defaultCaseSensitive"u8); + writer.WriteBooleanValue(DefaultCaseSensitive.Value); + } + if (Optional.IsDefined(DefaultAccentSensitive)) + { + writer.WritePropertyName("defaultAccentSensitive"u8); + writer.WriteBooleanValue(DefaultAccentSensitive.Value); + } + if (Optional.IsDefined(DefaultFuzzyEditDistance)) + { + writer.WritePropertyName("defaultFuzzyEditDistance"u8); + writer.WriteNumberValue(DefaultFuzzyEditDistance.Value); + } + if (Optional.IsCollectionDefined(Aliases)) + { + writer.WritePropertyName("aliases"u8); + writer.WriteStartArray(); + foreach (var item in Aliases) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + CustomEntity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomEntity)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCustomEntity(document.RootElement, options); + } + + internal static CustomEntity DeserializeCustomEntity(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string description = default; + string type = default; + string subtype = default; + string id = default; + bool? caseSensitive = default; + bool? accentSensitive = default; + int? fuzzyEditDistance = default; + bool? defaultCaseSensitive = default; + bool? defaultAccentSensitive = default; + int? 
defaultFuzzyEditDistance = default; + IList aliases = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (property.NameEquals("subtype"u8)) + { + subtype = property.Value.GetString(); + continue; + } + if (property.NameEquals("id"u8)) + { + id = property.Value.GetString(); + continue; + } + if (property.NameEquals("caseSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + caseSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("accentSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + accentSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("fuzzyEditDistance"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + fuzzyEditDistance = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("defaultCaseSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultCaseSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("defaultAccentSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultAccentSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("defaultFuzzyEditDistance"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultFuzzyEditDistance = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("aliases"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(CustomEntityAlias.DeserializeCustomEntityAlias(item, options)); + } + aliases = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CustomEntity( + name, + description, + type, + subtype, + id, + caseSensitive, + accentSensitive, + fuzzyEditDistance, + defaultCaseSensitive, + defaultAccentSensitive, + defaultFuzzyEditDistance, + aliases ?? new ChangeTrackingList(), + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CustomEntity)} does not support writing '{options.Format}' format."); + } + } + + CustomEntity IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomEntity(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CustomEntity)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CustomEntity FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomEntity(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomEntity.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntity.cs new file mode 100644 index 000000000000..25f46f5e2995 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntity.cs @@ -0,0 +1,219 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// An object that contains information about the matches that were found, and + /// related metadata. + /// + public partial class CustomEntity + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The top-level entity descriptor. Matches in the skill output will be grouped by + /// this name, and it should represent the "normalized" form of the text being + /// found. + /// + /// is null. + public CustomEntity(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + Aliases = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// The top-level entity descriptor. Matches in the skill output will be grouped by + /// this name, and it should represent the "normalized" form of the text being + /// found. + /// + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output. + /// + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output. 
+ /// + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output. + /// + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output. + /// + /// + /// Defaults to false. Boolean value denoting whether comparisons with the entity + /// name should be sensitive to character casing. Sample case insensitive matches + /// of "Microsoft" could be: microsoft, microSoft, MICROSOFT. + /// + /// + /// Defaults to false. Boolean value denoting whether comparisons with the entity + /// name should be sensitive to accent. + /// + /// + /// Defaults to 0. Maximum value of 5. Denotes the acceptable number of divergent + /// characters that would still constitute a match with the entity name. The + /// smallest possible fuzziness for any given match is returned. For instance, if + /// the edit distance is set to 3, "Windows10" would still match "Windows", + /// "Windows10" and "Windows 7". When case sensitivity is set to false, case + /// differences do NOT count towards fuzziness tolerance, but otherwise do. + /// + /// + /// Changes the default case sensitivity value for this entity. It can be used to + /// change the default value of all aliases caseSensitive values. + /// + /// + /// Changes the default accent sensitivity value for this entity. It can be used to + /// change the default value of all aliases accentSensitive values. + /// + /// + /// Changes the default fuzzy edit distance value for this entity. It can be used + /// to change the default value of all aliases fuzzyEditDistance values. + /// + /// + /// An array of complex objects that can be used to specify alternative spellings + /// or synonyms to the root entity name. + /// + /// Keeps track of any properties unknown to the library. + internal CustomEntity(string name, string description, string type, string subtype, string id, bool? caseSensitive, bool? accentSensitive, int? fuzzyEditDistance, bool? defaultCaseSensitive, bool? defaultAccentSensitive, int? defaultFuzzyEditDistance, IList aliases, IDictionary serializedAdditionalRawData) + { + Name = name; + Description = description; + Type = type; + Subtype = subtype; + Id = id; + CaseSensitive = caseSensitive; + AccentSensitive = accentSensitive; + FuzzyEditDistance = fuzzyEditDistance; + DefaultCaseSensitive = defaultCaseSensitive; + DefaultAccentSensitive = defaultAccentSensitive; + DefaultFuzzyEditDistance = defaultFuzzyEditDistance; + Aliases = aliases; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CustomEntity() + { + } + + /// + /// The top-level entity descriptor. Matches in the skill output will be grouped by + /// this name, and it should represent the "normalized" form of the text being + /// found. + /// + public string Name { get; set; } + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output. + /// + public string Description { get; set; } + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output.
+ /// + public string Type { get; set; } + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output. + /// + public string Subtype { get; set; } + /// + /// This field can be used as a passthrough for custom metadata about the matched + /// text(s). The value of this field will appear with every match of its entity in + /// the skill output. + /// + public string Id { get; set; } + /// + /// Defaults to false. Boolean value denoting whether comparisons with the entity + /// name should be sensitive to character casing. Sample case insensitive matches + /// of "Microsoft" could be: microsoft, microSoft, MICROSOFT. + /// + public bool? CaseSensitive { get; set; } + /// + /// Defaults to false. Boolean value denoting whether comparisons with the entity + /// name should be sensitive to accent. + /// + public bool? AccentSensitive { get; set; } + /// + /// Defaults to 0. Maximum value of 5. Denotes the acceptable number of divergent + /// characters that would still constitute a match with the entity name. The + /// smallest possible fuzziness for any given match is returned. For instance, if + /// the edit distance is set to 3, "Windows10" would still match "Windows", + /// "Windows10" and "Windows 7". When case sensitivity is set to false, case + /// differences do NOT count towards fuzziness tolerance, but otherwise do. + /// + public int? FuzzyEditDistance { get; set; } + /// + /// Changes the default case sensitivity value for this entity. It can be used to + /// change the default value of all aliases caseSensitive values. + /// + public bool? DefaultCaseSensitive { get; set; } + /// + /// Changes the default accent sensitivity value for this entity. It can be used to + /// change the default value of all aliases accentSensitive values. + /// + public bool? DefaultAccentSensitive { get; set; } + /// + /// Changes the default fuzzy edit distance value for this entity. It can be used + /// to change the default value of all aliases fuzzyEditDistance values. + /// + public int? DefaultFuzzyEditDistance { get; set; } + /// + /// An array of complex objects that can be used to specify alternative spellings + /// or synonyms to the root entity name. + /// + public IList Aliases { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityAlias.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityAlias.Serialization.cs new file mode 100644 index 000000000000..af14ee4f58cc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityAlias.Serialization.cs @@ -0,0 +1,187 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CustomEntityAlias : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models.
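Aside, not from the diff: a short sketch of how the CustomEntity model defined above might be populated, using only the public constructor and settable properties it exposes; the concrete values are invented.

using Azure.Search.Documents;

// "Windows" is the normalized entity name that matches are grouped under.
var entity = new CustomEntity("Windows")
{
    Description = "Operating system",  // passthrough metadata echoed with every match
    CaseSensitive = false,             // "windows", "WINDOWS", ... also match
    AccentSensitive = false,
    FuzzyEditDistance = 2              // tolerate up to 2 divergent characters, e.g. "Windows 7"
};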
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomEntityAlias)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + if (Optional.IsDefined(CaseSensitive)) + { + writer.WritePropertyName("caseSensitive"u8); + writer.WriteBooleanValue(CaseSensitive.Value); + } + if (Optional.IsDefined(AccentSensitive)) + { + writer.WritePropertyName("accentSensitive"u8); + writer.WriteBooleanValue(AccentSensitive.Value); + } + if (Optional.IsDefined(FuzzyEditDistance)) + { + writer.WritePropertyName("fuzzyEditDistance"u8); + writer.WriteNumberValue(FuzzyEditDistance.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + CustomEntityAlias IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomEntityAlias)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCustomEntityAlias(document.RootElement, options); + } + + internal static CustomEntityAlias DeserializeCustomEntityAlias(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string text = default; + bool? caseSensitive = default; + bool? accentSensitive = default; + int? fuzzyEditDistance = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + text = property.Value.GetString(); + continue; + } + if (property.NameEquals("caseSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + caseSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("accentSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + accentSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("fuzzyEditDistance"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + fuzzyEditDistance = property.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CustomEntityAlias(text, caseSensitive, accentSensitive, fuzzyEditDistance, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CustomEntityAlias)} does not support writing '{options.Format}' format."); + } + } + + CustomEntityAlias IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomEntityAlias(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CustomEntityAlias)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CustomEntityAlias FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomEntityAlias(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityAlias.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityAlias.cs new file mode 100644 index 000000000000..cb54cdc9b476 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityAlias.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A complex object that can be used to specify alternative spellings or synonyms + /// to the root entity name. + /// + public partial class CustomEntityAlias + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The text of the alias. + /// is null. + public CustomEntityAlias(string text) + { + Argument.AssertNotNull(text, nameof(text)); + + Text = text; + } + + /// Initializes a new instance of . + /// The text of the alias. + /// Determine if the alias is case sensitive. + /// Determine if the alias is accent sensitive. + /// Determine the fuzzy edit distance of the alias. + /// Keeps track of any properties unknown to the library. + internal CustomEntityAlias(string text, bool? caseSensitive, bool? 
accentSensitive, int? fuzzyEditDistance, IDictionary serializedAdditionalRawData) + { + Text = text; + CaseSensitive = caseSensitive; + AccentSensitive = accentSensitive; + FuzzyEditDistance = fuzzyEditDistance; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CustomEntityAlias() + { + } + + /// The text of the alias. + public string Text { get; set; } + /// Determine if the alias is case sensitive. + public bool? CaseSensitive { get; set; } + /// Determine if the alias is accent sensitive. + public bool? AccentSensitive { get; set; } + /// Determine the fuzzy edit distance of the alias. + public int? FuzzyEditDistance { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkill.Serialization.cs new file mode 100644 index 000000000000..75de2f01651b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkill.Serialization.cs @@ -0,0 +1,275 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CustomEntityLookupSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
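Aside, not from the diff: CustomEntityAlias instances are meant to be added to CustomEntity.Aliases (a get-only list), which this sketch shows using only the members defined above; the alias strings are invented.

using Azure.Search.Documents;

var entity = new CustomEntity("Seattle");

// Aliases is get-only, so alternative spellings are added rather than assigned.
entity.Aliases.Add(new CustomEntityAlias("SEA")
{
    CaseSensitive = true,   // match the airport code only in upper case
    FuzzyEditDistance = 0   // and only exactly
});
entity.Aliases.Add(new CustomEntityAlias("Emerald City") { AccentSensitive = false });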
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomEntityLookupSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); + } + if (Optional.IsDefined(EntitiesDefinitionUri)) + { + writer.WritePropertyName("entitiesDefinitionUri"u8); + writer.WriteStringValue(EntitiesDefinitionUri); + } + if (Optional.IsCollectionDefined(InlineEntitiesDefinition)) + { + writer.WritePropertyName("inlineEntitiesDefinition"u8); + writer.WriteStartArray(); + foreach (var item in InlineEntitiesDefinition) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(GlobalDefaultCaseSensitive)) + { + writer.WritePropertyName("globalDefaultCaseSensitive"u8); + writer.WriteBooleanValue(GlobalDefaultCaseSensitive.Value); + } + if (Optional.IsDefined(GlobalDefaultAccentSensitive)) + { + writer.WritePropertyName("globalDefaultAccentSensitive"u8); + writer.WriteBooleanValue(GlobalDefaultAccentSensitive.Value); + } + if (Optional.IsDefined(GlobalDefaultFuzzyEditDistance)) + { + writer.WritePropertyName("globalDefaultFuzzyEditDistance"u8); + writer.WriteNumberValue(GlobalDefaultFuzzyEditDistance.Value); + } + } + + CustomEntityLookupSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomEntityLookupSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCustomEntityLookupSkill(document.RootElement, options); + } + + internal static CustomEntityLookupSkill DeserializeCustomEntityLookupSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + CustomEntityLookupSkillLanguage? defaultLanguageCode = default; + string entitiesDefinitionUri = default; + IList inlineEntitiesDefinition = default; + bool? globalDefaultCaseSensitive = default; + bool? globalDefaultAccentSensitive = default; + int? 
globalDefaultFuzzyEditDistance = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultLanguageCode = new CustomEntityLookupSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("entitiesDefinitionUri"u8)) + { + entitiesDefinitionUri = property.Value.GetString(); + continue; + } + if (property.NameEquals("inlineEntitiesDefinition"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(CustomEntity.DeserializeCustomEntity(item, options)); + } + inlineEntitiesDefinition = array; + continue; + } + if (property.NameEquals("globalDefaultCaseSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + globalDefaultCaseSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("globalDefaultAccentSensitive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + globalDefaultAccentSensitive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("globalDefaultFuzzyEditDistance"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + globalDefaultFuzzyEditDistance = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CustomEntityLookupSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + entitiesDefinitionUri, + inlineEntitiesDefinition ?? new ChangeTrackingList(), + globalDefaultCaseSensitive, + globalDefaultAccentSensitive, + globalDefaultFuzzyEditDistance); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
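Aside, for orientation only and not taken from the diff: roughly the wire shape that DeserializeCustomEntityLookupSkill above walks through, read back via the "J" path. The nested input/output property names ("source", "targetName"), the sample values, and the assumption that the elided generic argument here is CustomEntityLookupSkill are all illustrative.

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

string json = """
{
  "@odata.type": "#Microsoft.Skills.Text.CustomEntityLookupSkill",
  "name": "#1",
  "context": "/document",
  "inputs": [ { "name": "text", "source": "/document/content" } ],
  "outputs": [ { "name": "entities", "targetName": "matchedEntities" } ],
  "defaultLanguageCode": "en",
  "inlineEntitiesDefinition": [ { "name": "Windows" } ]
}
""";

// Dispatches to IJsonModel<CustomEntityLookupSkill>.Create and the deserializer above.
var skill = ModelReaderWriter.Read<CustomEntityLookupSkill>(BinaryData.FromString(json));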
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CustomEntityLookupSkill)} does not support writing '{options.Format}' format."); + } + } + + CustomEntityLookupSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomEntityLookupSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CustomEntityLookupSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new CustomEntityLookupSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomEntityLookupSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkill.cs new file mode 100644 index 000000000000..7f5a83deda01 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkill.cs @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A skill that looks for text from a custom, user-defined list of words and phrases. + public partial class CustomEntityLookupSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public CustomEntityLookupSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.CustomEntityLookupSkill"; + InlineEntitiesDefinition = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill.
+ /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. Default is `en`. + /// + /// Path to a JSON or CSV file containing all the target text to match against. + /// This entity definition is read at the beginning of an indexer run. Any updates + /// to this file during an indexer run will not take effect until subsequent runs. + /// This config must be accessible over HTTPS. + /// + /// The inline CustomEntity definition. + /// + /// A global flag for CaseSensitive. If CaseSensitive is not set in CustomEntity, + /// this value will be the default value. + /// + /// + /// A global flag for AccentSensitive. If AccentSensitive is not set in + /// CustomEntity, this value will be the default value. + /// + /// + /// A global flag for FuzzyEditDistance. If FuzzyEditDistance is not set in + /// CustomEntity, this value will be the default value. + /// + internal CustomEntityLookupSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, CustomEntityLookupSkillLanguage? defaultLanguageCode, string entitiesDefinitionUri, IList inlineEntitiesDefinition, bool? globalDefaultCaseSensitive, bool? globalDefaultAccentSensitive, int? globalDefaultFuzzyEditDistance) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + EntitiesDefinitionUri = entitiesDefinitionUri; + InlineEntitiesDefinition = inlineEntitiesDefinition; + GlobalDefaultCaseSensitive = globalDefaultCaseSensitive; + GlobalDefaultAccentSensitive = globalDefaultAccentSensitive; + GlobalDefaultFuzzyEditDistance = globalDefaultFuzzyEditDistance; + } + + /// Initializes a new instance of for deserialization. + internal CustomEntityLookupSkill() + { + } + + /// A value indicating which language code to use. Default is `en`. + public CustomEntityLookupSkillLanguage? DefaultLanguageCode { get; set; } + /// + /// Path to a JSON or CSV file containing all the target text to match against. + /// This entity definition is read at the beginning of an indexer run. Any updates + /// to this file during an indexer run will not take effect until subsequent runs. + /// This config must be accessible over HTTPS. + /// + public string EntitiesDefinitionUri { get; set; } + /// The inline CustomEntity definition. + public IList InlineEntitiesDefinition { get; } + /// + /// A global flag for CaseSensitive. If CaseSensitive is not set in CustomEntity, + /// this value will be the default value. + /// + public bool? GlobalDefaultCaseSensitive { get; set; } + /// + /// A global flag for AccentSensitive. If AccentSensitive is not set in + /// CustomEntity, this value will be the default value. + /// + public bool? GlobalDefaultAccentSensitive { get; set; } + /// + /// A global flag for FuzzyEditDistance. If FuzzyEditDistance is not set in + /// CustomEntity, this value will be the default value. + /// + public int? 
GlobalDefaultFuzzyEditDistance { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkillLanguage.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkillLanguage.cs index 4d6caabb081f..b51b2efb36f0 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomEntityLookupSkillLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The language codes supported for input text by CustomEntityLookupSkill. public readonly partial struct CustomEntityLookupSkillLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomNormalizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomNormalizer.Serialization.cs new file mode 100644 index 000000000000..94a694d1b841 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomNormalizer.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class CustomNormalizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomNormalizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsCollectionDefined(TokenFilters)) + { + writer.WritePropertyName("tokenFilters"u8); + writer.WriteStartArray(); + foreach (var item in TokenFilters) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(CharFilters)) + { + writer.WritePropertyName("charFilters"u8); + writer.WriteStartArray(); + foreach (var item in CharFilters) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + } + + CustomNormalizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
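Aside, not in the diff: a sketch of constructing the CustomEntityLookupSkill defined above with an inline entity definition. The InputFieldMappingEntry/OutputFieldMappingEntry initializers (Source, TargetName) follow the shipped Azure.Search.Documents models and are assumptions here; the field paths and names are examples only.

using Azure.Search.Documents;

var skill = new CustomEntityLookupSkill(
    new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    new[] { new OutputFieldMappingEntry("entities") { TargetName = "matchedEntities" } })
{
    DefaultLanguageCode = CustomEntityLookupSkillLanguage.En,
    GlobalDefaultFuzzyEditDistance = 1
};

// InlineEntitiesDefinition is get-only, so entities are added to the existing list.
skill.InlineEntitiesDefinition.Add(new CustomEntity("Windows"));
skill.InlineEntitiesDefinition.Add(new CustomEntity("Seattle"));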
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CustomNormalizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCustomNormalizer(document.RootElement, options); + } + + internal static CustomNormalizer DeserializeCustomNormalizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList tokenFilters = default; + IList charFilters = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tokenFilters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new TokenFilterName(item.GetString())); + } + tokenFilters = array; + continue; + } + if (property.NameEquals("charFilters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new CharFilterName(item.GetString())); + } + charFilters = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CustomNormalizer(odataType, name, serializedAdditionalRawData, tokenFilters ?? new ChangeTrackingList(), charFilters ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CustomNormalizer)} does not support writing '{options.Format}' format."); + } + } + + CustomNormalizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomNormalizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CustomNormalizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new CustomNormalizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCustomNormalizer(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/CustomNormalizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/CustomNormalizer.cs new file mode 100644 index 000000000000..7b9e6b7d8fdb --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/CustomNormalizer.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Allows you to configure normalization for filterable, sortable, and facetable + /// fields, which by default operate with strict matching. This is a user-defined + /// configuration consisting of at least one or more filters, which modify the + /// token that is stored. + /// + public partial class CustomNormalizer : LexicalNormalizer + { + /// Initializes a new instance of . + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public CustomNormalizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.CustomNormalizer"; + TokenFilters = new ChangeTrackingList(); + CharFilters = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// A list of token filters used to filter out or modify the input token. For + /// example, you can specify a lowercase filter that converts all characters to + /// lowercase. The filters are run in the order in which they are listed. + /// + /// + /// A list of character filters used to prepare input text before it is processed. + /// For instance, they can replace certain characters or symbols. The filters are + /// run in the order in which they are listed. + /// + internal CustomNormalizer(string odataType, string name, IDictionary serializedAdditionalRawData, IList tokenFilters, IList charFilters) : base(odataType, name, serializedAdditionalRawData) + { + TokenFilters = tokenFilters; + CharFilters = charFilters; + } + + /// Initializes a new instance of for deserialization. + internal CustomNormalizer() + { + } + + /// + /// A list of token filters used to filter out or modify the input token. For + /// example, you can specify a lowercase filter that converts all characters to + /// lowercase. The filters are run in the order in which they are listed. + /// + public IList TokenFilters { get; } + /// + /// A list of character filters used to prepare input text before it is processed. + /// For instance, they can replace certain characters or symbols. The filters are + /// run in the order in which they are listed. 
+ /// + public IList CharFilters { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataChangeDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataChangeDetectionPolicy.Serialization.cs new file mode 100644 index 000000000000..b1c544a3b063 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DataChangeDetectionPolicy.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownDataChangeDetectionPolicy))] + public partial class DataChangeDetectionPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + DataChangeDetectionPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
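Aside, not in the diff: a minimal sketch of building the CustomNormalizer described above from well-known filter names. TokenFilterName.Lowercase, TokenFilterName.AsciiFolding, and CharFilterName.HtmlStrip follow the shipped SDK constants and are assumptions in this context; the normalizer name is invented.

using Azure.Search.Documents;

// Normalize filterable/sortable/facetable comparisons: lower-case and strip accents.
var normalizer = new CustomNormalizer("my-lowercase-normalizer");
normalizer.TokenFilters.Add(TokenFilterName.Lowercase);
normalizer.TokenFilters.Add(TokenFilterName.AsciiFolding);

// Char filters run first and prepare the raw input text.
normalizer.CharFilters.Add(CharFilterName.HtmlStrip);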
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDataChangeDetectionPolicy(document.RootElement, options); + } + + internal static DataChangeDetectionPolicy DeserializeDataChangeDetectionPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy": return HighWaterMarkChangeDetectionPolicy.DeserializeHighWaterMarkChangeDetectionPolicy(element, options); + case "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy": return SqlIntegratedChangeTrackingPolicy.DeserializeSqlIntegratedChangeTrackingPolicy(element, options); + } + } + return UnknownDataChangeDetectionPolicy.DeserializeUnknownDataChangeDetectionPolicy(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support writing '{options.Format}' format."); + } + } + + DataChangeDetectionPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataChangeDetectionPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DataChangeDetectionPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataChangeDetectionPolicy(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataChangeDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataChangeDetectionPolicy.cs new file mode 100644 index 000000000000..ce2197c4a7b8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DataChangeDetectionPolicy.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for data change detection policies. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public abstract partial class DataChangeDetectionPolicy + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + protected DataChangeDetectionPolicy() + { + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal DataChangeDetectionPolicy(string odataType, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataDeletionDetectionPolicy.Serialization.cs new file mode 100644 index 000000000000..2062eba00fe8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DataDeletionDetectionPolicy.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownDataDeletionDetectionPolicy))] + public partial class DataDeletionDetectionPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
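Aside, not in the diff: the "@odata.type" switch above is what routes a polymorphic payload to a concrete change detection policy. The constructors used below follow the shipped SDK models and are assumptions here; the column name is invented.

using Azure.Search.Documents;

// Serialized with "@odata.type": "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy".
DataChangeDetectionPolicy highWaterMark = new HighWaterMarkChangeDetectionPolicy("_ts");

// Serialized with "@odata.type": "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy".
DataChangeDetectionPolicy sqlChangeTracking = new SqlIntegratedChangeTrackingPolicy();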
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + DataDeletionDetectionPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDataDeletionDetectionPolicy(document.RootElement, options); + } + + internal static DataDeletionDetectionPolicy DeserializeDataDeletionDetectionPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy": return NativeBlobSoftDeleteDeletionDetectionPolicy.DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(element, options); + case "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy": return SoftDeleteColumnDeletionDetectionPolicy.DeserializeSoftDeleteColumnDeletionDetectionPolicy(element, options); + } + } + return UnknownDataDeletionDetectionPolicy.DeserializeUnknownDataDeletionDetectionPolicy(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support writing '{options.Format}' format."); + } + } + + DataDeletionDetectionPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataDeletionDetectionPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static DataDeletionDetectionPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataDeletionDetectionPolicy(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataDeletionDetectionPolicy.cs new file mode 100644 index 000000000000..17b02f507af9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DataDeletionDetectionPolicy.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for data deletion detection policies. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public abstract partial class DataDeletionDetectionPolicy + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + protected DataDeletionDetectionPolicy() + { + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal DataDeletionDetectionPolicy(string odataType, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataSourceCredentials.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataSourceCredentials.Serialization.cs new file mode 100644 index 000000000000..7a9d89c18004 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DataSourceCredentials.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
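Aside, not in the diff: the matching sketch for the deletion detection policies named in the discriminator switch above, assuming the property shapes of the shipped SDK models; the column and marker values are examples.

using Azure.Search.Documents;

// Rows whose IsDeleted column equals "true" are treated as deleted documents.
DataDeletionDetectionPolicy softDelete = new SoftDeleteColumnDeletionDetectionPolicy
{
    SoftDeleteColumnName = "IsDeleted",
    SoftDeleteMarkerValue = "true"
};

// Blob data sources can rely on native soft delete instead.
DataDeletionDetectionPolicy nativeSoftDelete = new NativeBlobSoftDeleteDeletionDetectionPolicy();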
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DataSourceCredentials : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataSourceCredentials)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(ConnectionString)) + { + writer.WritePropertyName("connectionString"u8); + writer.WriteStringValue(ConnectionString); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + DataSourceCredentials IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataSourceCredentials)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDataSourceCredentials(document.RootElement, options); + } + + internal static DataSourceCredentials DeserializeDataSourceCredentials(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string connectionString = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("connectionString"u8)) + { + connectionString = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DataSourceCredentials(connectionString, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DataSourceCredentials)} does not support writing '{options.Format}' format."); + } + } + + DataSourceCredentials IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataSourceCredentials(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DataSourceCredentials)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DataSourceCredentials FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataSourceCredentials(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataSourceCredentials.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataSourceCredentials.cs new file mode 100644 index 000000000000..df6dc9f3da1b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DataSourceCredentials.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents credentials that can be used to connect to a datasource. + public partial class DataSourceCredentials + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public DataSourceCredentials() + { + } + + /// Initializes a new instance of . + /// + /// The connection string for the datasource. Set to `<unchanged>` (with brackets) + /// if you don't want the connection string updated. Set to `<redacted>` if you + /// want to remove the connection string value from the datasource. + /// + /// Keeps track of any properties unknown to the library. 
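// Illustrative sketch of the sentinel values called out above; the surrounding update
// call is hypothetical and not part of this diff.
var unchangedCredentials = new Azure.Search.Documents.DataSourceCredentials
{
    // "<unchanged>" keeps the connection string the service already stores for this
    // datasource; "<redacted>" removes the stored value instead.
    ConnectionString = "<unchanged>"
};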
+ internal DataSourceCredentials(string connectionString, IDictionary serializedAdditionalRawData) + { + ConnectionString = connectionString; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The connection string for the datasource. Set to `<unchanged>` (with brackets) + /// if you don't want the connection string updated. Set to `<redacted>` if you + /// want to remove the connection string value from the datasource. + /// + public string ConnectionString { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataSources.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataSources.cs new file mode 100644 index 000000000000..e351c9f7d5c2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DataSources.cs @@ -0,0 +1,704 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + // Data plane generated sub-client. + /// The DataSources sub-client. + public partial class DataSources + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of DataSources for mocking. + protected DataSources() + { + } + + /// Initializes a new instance of DataSources. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// Service host. + /// The API version to use for this operation. + internal DataSources(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _apiVersion = apiVersion; + } + + /// Creates a new datasource or updates a datasource if it already exists. + /// The name of the datasource. + /// The definition of the datasource to create or update. + /// Ignores cache reset requirements. + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> CreateOrUpdateAsync(string dataSourceName, Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource, bool? 
skipIndexerResetRequirementForCache = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + Argument.AssertNotNull(dataSource, nameof(dataSource)); + + using RequestContent content = dataSource.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateOrUpdateAsync(dataSourceName, content, skipIndexerResetRequirementForCache, matchConditions, context).ConfigureAwait(false); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.FromResponse(response), response); + } + + /// Creates a new datasource or updates a datasource if it already exists. + /// The name of the datasource. + /// The definition of the datasource to create or update. + /// Ignores cache reset requirements. + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response CreateOrUpdate(string dataSourceName, Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource, bool? skipIndexerResetRequirementForCache = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + Argument.AssertNotNull(dataSource, nameof(dataSource)); + + using RequestContent content = dataSource.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateOrUpdate(dataSourceName, content, skipIndexerResetRequirementForCache, matchConditions, context); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new datasource or updates a datasource if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the datasource. + /// The content to send as the body of the request. + /// Ignores cache reset requirements. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateOrUpdateAsync(string dataSourceName, RequestContent content, bool? 
skipIndexerResetRequirementForCache = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(dataSourceName, content, skipIndexerResetRequirementForCache, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new datasource or updates a datasource if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the datasource. + /// The content to send as the body of the request. + /// Ignores cache reset requirements. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response CreateOrUpdate(string dataSourceName, RequestContent content, bool? skipIndexerResetRequirementForCache = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(dataSourceName, content, skipIndexerResetRequirementForCache, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a datasource. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the datasource. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
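// Illustrative usage sketch, not part of the diff: calling the strongly typed
// CreateOrUpdateAsync convenience overload defined above. How the DataSources
// sub-client is obtained from its parent client is outside this diff, so it is passed
// in here; the datasource name, connection string and ETag are hypothetical placeholders.
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Indexes.Models;

internal static class DataSourceUpsertExample
{
    public static async Task UpsertDataSourceAsync(DataSources dataSources)
    {
        var connection = new SearchIndexerDataSourceConnection(
            name: "hotels-ds",
            type: SearchIndexerDataSourceType.AzureSql,
            connectionString: "Server=tcp:example.database.windows.net;Database=hotels;",
            container: new SearchIndexerDataSourceContainer("hotels"));

        // Only apply the update if the service-side definition still matches this ETag.
        var conditions = new MatchConditions { IfMatch = new ETag("\"0x1234\"") };

        Response<SearchIndexerDataSourceConnection> result =
            await dataSources.CreateOrUpdateAsync("hotels-ds", connection, matchConditions: conditions);
    }
}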
+ /// + public virtual async Task DeleteAsync(string dataSourceName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(dataSourceName, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a datasource. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the datasource. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(string dataSourceName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(dataSourceName, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves a datasource definition. + /// The name of the datasource. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetDataSourceAsync(string dataSourceName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetDataSourceAsync(dataSourceName, context).ConfigureAwait(false); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.FromResponse(response), response); + } + + /// Retrieves a datasource definition. + /// The name of the datasource. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetDataSource(string dataSourceName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetDataSource(dataSourceName, context); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieves a datasource definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the datasource. 
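// Illustrative sketch: deleting a datasource with an ETag precondition via the protocol
// method above. The 204/404 response classifier defined later in this file means a
// missing datasource is not surfaced as an error by default. 'dataSources' is assumed
// to come from the parent client, which is outside this diff.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;

internal static class DataSourceDeleteExample
{
    public static async Task DeleteIfUnchangedAsync(DataSources dataSources, ETag knownETag)
    {
        var conditions = new MatchConditions { IfMatch = knownETag };
        Response response = await dataSources.DeleteAsync("hotels-ds", conditions);
        Console.WriteLine($"Delete returned status {response.Status}.");
    }
}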
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetDataSourceAsync(string dataSourceName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.GetDataSource"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDataSourceRequest(dataSourceName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieves a datasource definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the datasource. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetDataSource(string dataSourceName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(dataSourceName, nameof(dataSourceName)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.GetDataSource"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDataSourceRequest(dataSourceName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Lists all datasources available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual async Task> GetDataSourcesAsync(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetDataSourcesAsync(select, context).ConfigureAwait(false); + return Response.FromValue(ListDataSourcesResult.FromResponse(response), response); + } + + /// Lists all datasources available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual Response GetDataSources(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetDataSources(select, context); + return Response.FromValue(ListDataSourcesResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Lists all datasources available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetDataSourcesAsync(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("DataSources.GetDataSources"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDataSourcesRequest(select, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Lists all datasources available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetDataSources(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("DataSources.GetDataSources"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDataSourcesRequest(select, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new datasource. + /// The definition of the datasource to create. + /// The cancellation token to use. + /// is null. + /// + public virtual async Task> CreateAsync(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(dataSource, nameof(dataSource)); + + using RequestContent content = dataSource.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(content, context).ConfigureAwait(false); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.FromResponse(response), response); + } + + /// Creates a new datasource. + /// The definition of the datasource to create. + /// The cancellation token to use. + /// is null. 
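// Illustrative sketch: listing datasources and trimming the payload with the $select
// parameter described above. The DataSources property on ListDataSourcesResult is an
// assumption based on the generated model elsewhere in this change.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Indexes.Models;

internal static class DataSourceListExample
{
    public static async Task ListNamesAsync(DataSources dataSources)
    {
        Response<ListDataSourcesResult> listing =
            await dataSources.GetDataSourcesAsync(select: "name,type");

        foreach (SearchIndexerDataSourceConnection connection in listing.Value.DataSources)
        {
            Console.WriteLine($"{connection.Name} ({connection.Type})");
        }
    }
}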
+ /// + public virtual Response Create(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(dataSource, nameof(dataSource)); + + using RequestContent content = dataSource.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(content, context); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new datasource. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new datasource. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("DataSources.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateOrUpdateRequest(string dataSourceName, RequestContent content, bool? 
skipIndexerResetRequirementForCache, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200201); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/datasources('", false); + uri.AppendPath(dataSourceName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (skipIndexerResetRequirementForCache != null) + { + uri.AppendQuery("ignoreResetRequirements", skipIndexerResetRequirementForCache.Value, true); + } + request.Uri = uri; + request.Headers.Add("Prefer", "return=representation"); + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(string dataSourceName, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204404); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/datasources('", false); + uri.AppendPath(dataSourceName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + return message; + } + + internal HttpMessage CreateGetDataSourceRequest(string dataSourceName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/datasources('", false); + uri.AppendPath(dataSourceName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetDataSourcesRequest(string select, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/datasources", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (select != null) + { + uri.AppendQuery("$select", select, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier201); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/datasources", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if 
(!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200201; + private static ResponseClassifier ResponseClassifier200201 => _responseClassifier200201 ??= new StatusCodeClassifier(stackalloc ushort[] { 200, 201 }); + private static ResponseClassifier _responseClassifier204404; + private static ResponseClassifier ResponseClassifier204404 => _responseClassifier204404 ??= new StatusCodeClassifier(stackalloc ushort[] { 204, 404 }); + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier201; + private static ResponseClassifier ResponseClassifier201 => _responseClassifier201 ??= new StatusCodeClassifier(stackalloc ushort[] { 201 }); + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DataSourcesRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/DataSourcesRestClient.cs deleted file mode 100644 index b962d396a758..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/DataSourcesRestClient.cs +++ /dev/null @@ -1,425 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents -{ - internal partial class DataSourcesRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of DataSourcesRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , or is null. - public DataSourcesRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateCreateOrUpdateRequest(string dataSourceName, SearchIndexerDataSourceConnection dataSource, string ifMatch, string ifNoneMatch, bool? 
skipIndexerResetRequirementForCache) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Put; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/datasources('", false); - uri.AppendPath(dataSourceName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - if (skipIndexerResetRequirementForCache != null) - { - uri.AppendQuery("ignoreResetRequirements", skipIndexerResetRequirementForCache.Value, true); - } - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Prefer", "return=representation"); - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(dataSource); - request.Content = content; - return message; - } - - /// Creates a new datasource or updates a datasource if it already exists. - /// The name of the datasource to create or update. - /// The definition of the datasource to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// Ignores cache reset requirements. - /// The cancellation token to use. - /// or is null. - public async Task> CreateOrUpdateAsync(string dataSourceName, SearchIndexerDataSourceConnection dataSource, string ifMatch = null, string ifNoneMatch = null, bool? skipIndexerResetRequirementForCache = null, CancellationToken cancellationToken = default) - { - if (dataSourceName == null) - { - throw new ArgumentNullException(nameof(dataSourceName)); - } - if (dataSource == null) - { - throw new ArgumentNullException(nameof(dataSource)); - } - - using var message = CreateCreateOrUpdateRequest(dataSourceName, dataSource, ifMatch, ifNoneMatch, skipIndexerResetRequirementForCache); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndexerDataSourceConnection value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new datasource or updates a datasource if it already exists. - /// The name of the datasource to create or update. - /// The definition of the datasource to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// Ignores cache reset requirements. - /// The cancellation token to use. - /// or is null. - public Response CreateOrUpdate(string dataSourceName, SearchIndexerDataSourceConnection dataSource, string ifMatch = null, string ifNoneMatch = null, bool? 
skipIndexerResetRequirementForCache = null, CancellationToken cancellationToken = default) - { - if (dataSourceName == null) - { - throw new ArgumentNullException(nameof(dataSourceName)); - } - if (dataSource == null) - { - throw new ArgumentNullException(nameof(dataSource)); - } - - using var message = CreateCreateOrUpdateRequest(dataSourceName, dataSource, ifMatch, ifNoneMatch, skipIndexerResetRequirementForCache); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndexerDataSourceConnection value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateDeleteRequest(string dataSourceName, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Delete; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/datasources('", false); - uri.AppendPath(dataSourceName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Deletes a datasource. - /// The name of the datasource to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public async Task DeleteAsync(string dataSourceName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (dataSourceName == null) - { - throw new ArgumentNullException(nameof(dataSourceName)); - } - - using var message = CreateDeleteRequest(dataSourceName, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Deletes a datasource. - /// The name of the datasource to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. 
- public Response Delete(string dataSourceName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (dataSourceName == null) - { - throw new ArgumentNullException(nameof(dataSourceName)); - } - - using var message = CreateDeleteRequest(dataSourceName, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetRequest(string dataSourceName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/datasources('", false); - uri.AppendPath(dataSourceName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Retrieves a datasource definition. - /// The name of the datasource to retrieve. - /// The cancellation token to use. - /// is null. - public async Task> GetAsync(string dataSourceName, CancellationToken cancellationToken = default) - { - if (dataSourceName == null) - { - throw new ArgumentNullException(nameof(dataSourceName)); - } - - using var message = CreateGetRequest(dataSourceName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchIndexerDataSourceConnection value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Retrieves a datasource definition. - /// The name of the datasource to retrieve. - /// The cancellation token to use. - /// is null. 
- public Response Get(string dataSourceName, CancellationToken cancellationToken = default) - { - if (dataSourceName == null) - { - throw new ArgumentNullException(nameof(dataSourceName)); - } - - using var message = CreateGetRequest(dataSourceName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchIndexerDataSourceConnection value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateListRequest(string select) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/datasources", false); - if (select != null) - { - uri.AppendQuery("$select", select, true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Lists all datasources available for a search service. - /// Selects which top-level properties of the data sources to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public async Task> ListAsync(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - ListDataSourcesResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = ListDataSourcesResult.DeserializeListDataSourcesResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Lists all datasources available for a search service. - /// Selects which top-level properties of the data sources to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. 
- public Response List(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - ListDataSourcesResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = ListDataSourcesResult.DeserializeListDataSourcesResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateCreateRequest(SearchIndexerDataSourceConnection dataSource) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/datasources", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(dataSource); - request.Content = content; - return message; - } - - /// Creates a new datasource. - /// The definition of the datasource to create. - /// The cancellation token to use. - /// is null. - public async Task> CreateAsync(SearchIndexerDataSourceConnection dataSource, CancellationToken cancellationToken = default) - { - if (dataSource == null) - { - throw new ArgumentNullException(nameof(dataSource)); - } - - using var message = CreateCreateRequest(dataSource); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 201: - { - SearchIndexerDataSourceConnection value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new datasource. - /// The definition of the datasource to create. - /// The cancellation token to use. - /// is null. 
- public Response Create(SearchIndexerDataSourceConnection dataSource, CancellationToken cancellationToken = default) - { - if (dataSource == null) - { - throw new ArgumentNullException(nameof(dataSource)); - } - - using var message = CreateCreateRequest(dataSource); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 201: - { - SearchIndexerDataSourceConnection value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DebugInfo.Serialization.cs new file mode 100644 index 000000000000..e5d6e374a653 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DebugInfo.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DebugInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DebugInfo)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(QueryRewrites)) + { + writer.WritePropertyName("queryRewrites"u8); + writer.WriteObjectValue(QueryRewrites, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + DebugInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DebugInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDebugInfo(document.RootElement, options); + } + + internal static DebugInfo DeserializeDebugInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + QueryRewritesDebugInfo queryRewrites = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("queryRewrites"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + queryRewrites = QueryRewritesDebugInfo.DeserializeQueryRewritesDebugInfo(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DebugInfo(queryRewrites, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DebugInfo)} does not support writing '{options.Format}' format."); + } + } + + DebugInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDebugInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DebugInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DebugInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDebugInfo(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/DebugInfo.cs new file mode 100644 index 000000000000..23b69beb88e1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DebugInfo.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains debugging information that can be used to further explore your search + /// results. 
+ /// + public partial class DebugInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal DebugInfo() + { + } + + /// Initializes a new instance of . + /// Contains debugging information specific to query rewrites. + /// Keeps track of any properties unknown to the library. + internal DebugInfo(QueryRewritesDebugInfo queryRewrites, IDictionary serializedAdditionalRawData) + { + QueryRewrites = queryRewrites; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Contains debugging information specific to query rewrites. + public QueryRewritesDebugInfo QueryRewrites { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DefaultCognitiveServicesAccount.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DefaultCognitiveServicesAccount.Serialization.cs new file mode 100644 index 000000000000..82c59a7486a2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DefaultCognitiveServicesAccount.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DefaultCognitiveServicesAccount : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DefaultCognitiveServicesAccount)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + DefaultCognitiveServicesAccount IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DefaultCognitiveServicesAccount)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDefaultCognitiveServicesAccount(document.RootElement, options); + } + + internal static DefaultCognitiveServicesAccount DeserializeDefaultCognitiveServicesAccount(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = default; + string description = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DefaultCognitiveServicesAccount(odataType, description, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DefaultCognitiveServicesAccount)} does not support writing '{options.Format}' format."); + } + } + + DefaultCognitiveServicesAccount IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDefaultCognitiveServicesAccount(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DefaultCognitiveServicesAccount)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new DefaultCognitiveServicesAccount FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDefaultCognitiveServicesAccount(document.RootElement); + } + + /// Convert into a . 
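// Illustrative sketch: the "@odata.type" value read in the deserializer above is the
// discriminator that selects the derived CognitiveServicesAccount type; the constant
// assigned in the DefaultCognitiveServicesAccount constructor just below is the value
// matched here. Reading through the abstract base is assumed to dispatch on it.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

BinaryData json = BinaryData.FromString(
    "{\"@odata.type\":\"#Microsoft.Azure.Search.DefaultCognitiveServices\"}");

// Expected to come back as a DefaultCognitiveServicesAccount instance.
CognitiveServicesAccount account = ModelReaderWriter.Read<CognitiveServicesAccount>(json);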
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DefaultCognitiveServicesAccount.cs b/sdk/search/Azure.Search.Documents/src/Generated/DefaultCognitiveServicesAccount.cs new file mode 100644 index 000000000000..e054ba3a8b60 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DefaultCognitiveServicesAccount.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// An empty object that represents the default Azure AI service resource for a + /// skillset. + /// + public partial class DefaultCognitiveServicesAccount : CognitiveServicesAccount + { + /// Initializes a new instance of . + public DefaultCognitiveServicesAccount() + { + OdataType = "#Microsoft.Azure.Search.DefaultCognitiveServices"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Description of the Azure AI service resource attached to a skillset. + /// Keeps track of any properties unknown to the library. + internal DefaultCognitiveServicesAccount(string odataType, string description, IDictionary serializedAdditionalRawData) : base(odataType, description, serializedAdditionalRawData) + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DictionaryDecompounderTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DictionaryDecompounderTokenFilter.Serialization.cs new file mode 100644 index 000000000000..ee922c77fae9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DictionaryDecompounderTokenFilter.Serialization.cs @@ -0,0 +1,218 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DictionaryDecompounderTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DictionaryDecompounderTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("wordList"u8); + writer.WriteStartArray(); + foreach (var item in WordList) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(MinWordSize)) + { + writer.WritePropertyName("minWordSize"u8); + writer.WriteNumberValue(MinWordSize.Value); + } + if (Optional.IsDefined(MinSubwordSize)) + { + writer.WritePropertyName("minSubwordSize"u8); + writer.WriteNumberValue(MinSubwordSize.Value); + } + if (Optional.IsDefined(MaxSubwordSize)) + { + writer.WritePropertyName("maxSubwordSize"u8); + writer.WriteNumberValue(MaxSubwordSize.Value); + } + if (Optional.IsDefined(OnlyLongestMatch)) + { + writer.WritePropertyName("onlyLongestMatch"u8); + writer.WriteBooleanValue(OnlyLongestMatch.Value); + } + } + + DictionaryDecompounderTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DictionaryDecompounderTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDictionaryDecompounderTokenFilter(document.RootElement, options); + } + + internal static DictionaryDecompounderTokenFilter DeserializeDictionaryDecompounderTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList wordList = default; + int? minWordSize = default; + int? minSubwordSize = default; + int? maxSubwordSize = default; + bool? 
onlyLongestMatch = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("wordList"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + wordList = array; + continue; + } + if (property.NameEquals("minWordSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minWordSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("minSubwordSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minSubwordSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxSubwordSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxSubwordSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("onlyLongestMatch"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + onlyLongestMatch = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DictionaryDecompounderTokenFilter( + odataType, + name, + serializedAdditionalRawData, + wordList, + minWordSize, + minSubwordSize, + maxSubwordSize, + onlyLongestMatch); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DictionaryDecompounderTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + DictionaryDecompounderTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDictionaryDecompounderTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DictionaryDecompounderTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new DictionaryDecompounderTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDictionaryDecompounderTokenFilter(document.RootElement); + } + + /// Convert into a . 
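+        // Hedged usage sketch (type members outside this file are assumptions): a filter such as
+        //     new DictionaryDecompounderTokenFilter("de-decompound", new[] { "Donau", "dampf", "schiff" })
+        // would normally be added to an index's token filter collection (for example
+        // SearchIndex.TokenFilters in the hand-authored layer) and referenced by name from a custom analyzer.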
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DictionaryDecompounderTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/DictionaryDecompounderTokenFilter.cs new file mode 100644 index 000000000000..0822f2ca0ed3 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DictionaryDecompounderTokenFilter.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Decomposes compound words found in many Germanic languages. This token filter + /// is implemented using Apache Lucene. + /// + public partial class DictionaryDecompounderTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// The list of words to match against. + /// or is null. + public DictionaryDecompounderTokenFilter(string name, IEnumerable wordList) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(wordList, nameof(wordList)); + + OdataType = "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter"; + WordList = wordList.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The list of words to match against. + /// + /// The minimum word size. Only words longer than this get processed. Default is 5. + /// Maximum is 300. + /// + /// + /// The minimum subword size. Only subwords longer than this are outputted. Default + /// is 2. Maximum is 300. + /// + /// + /// The maximum subword size. Only subwords shorter than this are outputted. + /// Default is 15. Maximum is 300. + /// + /// + /// A value indicating whether to add only the longest matching subword to the + /// output. Default is false. + /// + internal DictionaryDecompounderTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList wordList, int? minWordSize, int? minSubwordSize, int? maxSubwordSize, bool? onlyLongestMatch) : base(odataType, name, serializedAdditionalRawData) + { + WordList = wordList; + MinWordSize = minWordSize; + MinSubwordSize = minSubwordSize; + MaxSubwordSize = maxSubwordSize; + OnlyLongestMatch = onlyLongestMatch; + } + + /// Initializes a new instance of for deserialization. + internal DictionaryDecompounderTokenFilter() + { + } + + /// The list of words to match against. + public IList WordList { get; } + /// + /// The minimum word size. Only words longer than this get processed. Default is 5. + /// Maximum is 300. + /// + public int? MinWordSize { get; set; } + /// + /// The minimum subword size. Only subwords longer than this are outputted. Default + /// is 2. Maximum is 300. + /// + public int? MinSubwordSize { get; set; } + /// + /// The maximum subword size. 
Only subwords shorter than this are outputted. + /// Default is 15. Maximum is 300. + /// + public int? MaxSubwordSize { get; set; } + /// + /// A value indicating whether to add only the longest matching subword to the + /// output. Default is false. + /// + public bool? OnlyLongestMatch { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringFunction.Serialization.cs new file mode 100644 index 000000000000..0a530377d00c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringFunction.Serialization.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DistanceScoringFunction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DistanceScoringFunction)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("distance"u8); + writer.WriteObjectValue(Parameters, options); + } + + DistanceScoringFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DistanceScoringFunction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDistanceScoringFunction(document.RootElement, options); + } + + internal static DistanceScoringFunction DeserializeDistanceScoringFunction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DistanceScoringParameters distance = default; + string fieldName = default; + double boost = default; + ScoringFunctionInterpolation? 
interpolation = default; + string type = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("distance"u8)) + { + distance = DistanceScoringParameters.DeserializeDistanceScoringParameters(property.Value, options); + continue; + } + if (property.NameEquals("fieldName"u8)) + { + fieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("boost"u8)) + { + boost = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("interpolation"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + interpolation = new ScoringFunctionInterpolation(property.Value.GetString()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DistanceScoringFunction( + fieldName, + boost, + interpolation, + type, + serializedAdditionalRawData, + distance); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DistanceScoringFunction)} does not support writing '{options.Format}' format."); + } + } + + DistanceScoringFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDistanceScoringFunction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DistanceScoringFunction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new DistanceScoringFunction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDistanceScoringFunction(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringFunction.cs new file mode 100644 index 000000000000..e9753535925a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringFunction.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
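+// Hedged usage sketch (the scoring profile types referenced here are assumptions from the
+// hand-authored layer, not part of this generated file):
+//     var distanceBoost = new DistanceScoringFunction(
+//         fieldName: "location",
+//         boost: 2.0,
+//         parameters: new DistanceScoringParameters("currentLocation", boostingDistance: 10));
+// A function like this is typically listed under a scoring profile's Functions collection on the index.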
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a function that boosts scores based on distance from a geographic + /// location. + /// + public partial class DistanceScoringFunction : ScoringFunction + { + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// Parameter values for the distance scoring function. + /// or is null. + public DistanceScoringFunction(string fieldName, double boost, DistanceScoringParameters parameters) : base(fieldName, boost) + { + Argument.AssertNotNull(fieldName, nameof(fieldName)); + Argument.AssertNotNull(parameters, nameof(parameters)); + + Type = "distance"; + Parameters = parameters; + } + + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// + /// A value indicating how boosting will be interpolated across document scores; + /// defaults to "Linear". + /// + /// Type of ScoringFunction. + /// Keeps track of any properties unknown to the library. + /// Parameter values for the distance scoring function. + internal DistanceScoringFunction(string fieldName, double boost, ScoringFunctionInterpolation? interpolation, string type, IDictionary serializedAdditionalRawData, DistanceScoringParameters parameters) : base(fieldName, boost, interpolation, type, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal DistanceScoringFunction() + { + } + + /// Parameter values for the distance scoring function. + public DistanceScoringParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringParameters.Serialization.cs new file mode 100644 index 000000000000..ae0496d49a5b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringParameters.Serialization.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DistanceScoringParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DistanceScoringParameters)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("referencePointParameter"u8); + writer.WriteStringValue(ReferencePointParameter); + writer.WritePropertyName("boostingDistance"u8); + writer.WriteNumberValue(BoostingDistance); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + DistanceScoringParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DistanceScoringParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDistanceScoringParameters(document.RootElement, options); + } + + internal static DistanceScoringParameters DeserializeDistanceScoringParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string referencePointParameter = default; + double boostingDistance = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("referencePointParameter"u8)) + { + referencePointParameter = property.Value.GetString(); + continue; + } + if (property.NameEquals("boostingDistance"u8)) + { + boostingDistance = property.Value.GetDouble(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DistanceScoringParameters(referencePointParameter, boostingDistance, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DistanceScoringParameters)} does not support writing '{options.Format}' format."); + } + } + + DistanceScoringParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDistanceScoringParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DistanceScoringParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DistanceScoringParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDistanceScoringParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringParameters.cs new file mode 100644 index 000000000000..68690ab3069e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DistanceScoringParameters.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Provides parameter values to a distance scoring function. + public partial class DistanceScoringParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the parameter passed in search queries to specify the reference + /// location. + /// + /// + /// The distance in kilometers from the reference location where the boosting range + /// ends. + /// + /// is null. + public DistanceScoringParameters(string referencePointParameter, double boostingDistance) + { + Argument.AssertNotNull(referencePointParameter, nameof(referencePointParameter)); + + ReferencePointParameter = referencePointParameter; + BoostingDistance = boostingDistance; + } + + /// Initializes a new instance of . + /// + /// The name of the parameter passed in search queries to specify the reference + /// location. + /// + /// + /// The distance in kilometers from the reference location where the boosting range + /// ends. + /// + /// Keeps track of any properties unknown to the library. 
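+        // Used by DeserializeDistanceScoringParameters: restores the two known properties and stashes any
+        // unrecognized JSON in _serializedAdditionalRawData so the payload round-trips unchanged on write.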
+ internal DistanceScoringParameters(string referencePointParameter, double boostingDistance, IDictionary serializedAdditionalRawData) + { + ReferencePointParameter = referencePointParameter; + BoostingDistance = boostingDistance; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal DistanceScoringParameters() + { + } + + /// + /// The name of the parameter passed in search queries to specify the reference + /// location. + /// + public string ReferencePointParameter { get; set; } + /// + /// The distance in kilometers from the reference location where the boosting range + /// ends. + /// + public double BoostingDistance { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/Aliases.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Aliases.xml new file mode 100644 index 000000000000..5b57dd3b7197 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Aliases.xml @@ -0,0 +1,297 @@ + + + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +SearchAlias @alias = new SearchAlias("tempalias", new string[] { "preview-test" }) +{ + ETag = "0x1234568AE7E58A1", +}; +Response response = await client.CreateAsync(@alias); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +SearchAlias @alias = new SearchAlias("tempalias", new string[] { "preview-test" }) +{ + ETag = "0x1234568AE7E58A1", +}; +Response response = client.Create(@alias); +]]> + + + +This sample shows how to call CreateAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempalias", + ["indexes"] = new object[] + { + "preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = await client.CreateAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +]]> + + + +This sample shows how to call Create and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempalias", + ["indexes"] = new object[] + { + "preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = client.Create(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +]]> + + + +This sample shows how to call CreateOrUpdateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +SearchAlias @alias = new SearchAlias("myalias", new string[] { "preview-test" }) +{ + ETag = "0x1234568AE7E58A1", +}; +Response response = await client.CreateOrUpdateAsync("myalias", @alias); +]]> + + + +This sample shows how to call CreateOrUpdate. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +SearchAlias @alias = new SearchAlias("myalias", new string[] { "preview-test" }) +{ + ETag = "0x1234568AE7E58A1", +}; +Response response = client.CreateOrUpdate("myalias", @alias); +]]> + + + +This sample shows how to call CreateOrUpdateAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "myalias", + ["indexes"] = new object[] + { + "preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = await client.CreateOrUpdateAsync("myalias", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +]]> + + + +This sample shows how to call CreateOrUpdate and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "myalias", + ["indexes"] = new object[] + { + "preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = client.CreateOrUpdate("myalias", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +Response response = await client.DeleteAsync("tempalias"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +Response response = client.Delete("tempalias"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetAliasAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +Response response = await client.GetAliasAsync("myalias"); +]]> + + + +This sample shows how to call GetAlias. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +Response response = client.GetAlias("myalias"); +]]> + + + +This sample shows how to call GetAliasAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +Response response = await client.GetAliasAsync("myalias", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +]]> + + + +This sample shows how to call GetAlias and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +Response response = client.GetAlias("myalias", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +]]> + + + +This sample shows how to call GetAliasesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +await foreach (SearchAlias item in client.GetAliasesAsync()) +{ +} +]]> + + + +This sample shows how to call GetAliases. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +foreach (SearchAlias item in client.GetAliases()) +{ +} +]]> + + + +This sample shows how to call GetAliasesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +await foreach (BinaryData item in client.GetAliasesAsync(null)) +{ + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +} +]]> + + + +This sample shows how to call GetAliases and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + +foreach (BinaryData item in client.GetAliases(null)) +{ + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); +} +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/DataSources.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/DataSources.xml new file mode 100644 index 000000000000..0eb7e702ad82 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/DataSources.xml @@ -0,0 +1,435 @@ + + + + + +This sample shows how to call CreateOrUpdateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials +{ + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", +}, new SearchIndexerDataContainer("doc-extraction-skillset") +{ + Query = "E2E_Dsat", +}) +{ + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = await client.CreateOrUpdateAsync("tempdatasource", dataSource); +]]> + + + +This sample shows how to call CreateOrUpdate. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials +{ + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", +}, new SearchIndexerDataContainer("doc-extraction-skillset") +{ + Query = "E2E_Dsat", +}) +{ + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = client.CreateOrUpdate("tempdatasource", dataSource); +]]> + + + +This sample shows how to call CreateOrUpdateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = await client.CreateOrUpdateAsync("tempdatasource", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); +]]> + + + +This sample shows how to call CreateOrUpdate and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = client.CreateOrUpdate("tempdatasource", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = await client.DeleteAsync("tempdatasource"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = client.Delete("tempdatasource"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetDataSourceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = await client.GetDataSourceAsync("mydocdbdatasource"); +]]> + + + +This sample shows how to call GetDataSource. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = client.GetDataSource("mydocdbdatasource"); +]]> + + + +This sample shows how to call GetDataSourceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = await client.GetDataSourceAsync("mydocdbdatasource", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetDataSource and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = client.GetDataSource("mydocdbdatasource", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetDataSourcesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = await client.GetDataSourcesAsync(); +]]> + + + +This sample shows how to call GetDataSources. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = client.GetDataSources(); +]]> + + + +This sample shows how to call GetDataSourcesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = await client.GetDataSourcesAsync(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("container").GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetDataSources and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Response response = client.GetDataSources(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("container").GetProperty("name").ToString()); +]]> + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials +{ + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", +}, new SearchIndexerDataContainer("doc-extraction-skillset") +{ + Query = "E2E_Dsat", +}) +{ + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = await client.CreateAsync(dataSource); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials +{ + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", +}, new SearchIndexerDataContainer("doc-extraction-skillset") +{ + Query = "E2E_Dsat", +}) +{ + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = client.Create(dataSource); +]]> + + + +This sample shows how to call CreateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = await client.CreateAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); +]]> + + + +This sample shows how to call Create and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = client.Create(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("type").ToString()); +Console.WriteLine(result.GetProperty("credentials").ToString()); +Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/Documents.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Documents.xml new file mode 100644 index 000000000000..5a83f399a7a1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Documents.xml @@ -0,0 +1,1587 @@ + + + + + +This sample shows how to call CountAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.CountAsync("preview-test"); +]]> + + + +This sample shows how to call Count. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.Count("preview-test"); +]]> + + + +This sample shows how to call CountAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.CountAsync("preview-test", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> + + + +This sample shows how to call Count and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.Count("preview-test", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> + + + +This sample shows how to call SearchGetAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.SearchGetAsync("myindex"); +]]> +This sample shows how to call SearchGetAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.SearchGetAsync("myindex"); +]]> + + + +This sample shows how to call SearchGet. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.SearchGet("myindex"); +]]> +This sample shows how to call SearchGet. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.SearchGet("myindex"); +]]> + + + +This sample shows how to call SearchGetAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.SearchGetAsync("myindex", "nice hotels", true, new string[] { "category,count:10,sort:count" }, "rating gt 10", new string[] { "title" }, "", "", 80, new string[] { "search.score() desc", "rating desc" }, "simple", null, "sp", new string[] { "title", "description" }, "any", "global", "mysessionid", new string[] { "docId", "title", "description" }, 100, 10, null, null, null, null, null, null, null, null, null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> +This sample shows how to call SearchGetAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.SearchGetAsync("myindex", "how do clouds form", true, null, null, null, "", "", null, null, "semantic", null, null, null, null, null, null, null, null, null, "my-semantic-config", "partial", 780, null, null, null, null, null, null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> + + + +This sample shows how to call SearchGet and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.SearchGet("myindex", "nice hotels", true, new string[] { "category,count:10,sort:count" }, "rating gt 10", new string[] { "title" }, "", "", 80, new string[] { "search.score() desc", "rating desc" }, "simple", null, "sp", new string[] { "title", "description" }, "any", "global", "mysessionid", new string[] { "docId", "title", "description" }, 100, 10, null, null, null, null, null, null, null, null, null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> +This sample shows how to call SearchGet and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.SearchGet("myindex", "how do clouds form", true, null, null, null, "", "", null, null, "semantic", null, null, null, null, null, null, null, null, null, "my-semantic-config", "partial", 780, null, null, null, null, null, null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> + + + +This sample shows how to call SearchPostAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +SearchOptions searchOptions = null; +Response response = await client.SearchPostAsync("preview-test", searchOptions); +]]> +This sample shows how to call SearchPostAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +SearchOptions searchOptions = null; +Response response = await client.SearchPostAsync("myindex", searchOptions); +]]> + + + +This sample shows how to call SearchPost. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +SearchOptions searchOptions = null; +Response response = client.SearchPost("preview-test", searchOptions); +]]> +This sample shows how to call SearchPost. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +SearchOptions searchOptions = null; +Response response = client.SearchPost("myindex", searchOptions); +]]> + + + +This sample shows how to call SearchPostAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + count = true, + facets = new object[] + { + "ownerId", + "price,metric:sum,default:10" + }, + filter = "category eq 'purple' or category eq 'pink'", + highlight = "category", + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 100, + queryType = "semantic", + scoringStatistics = "global", + sessionId = "mysessionid", + scoringParameters = new object[] + { + "categoryTag:desiredCategoryValue" + }, + scoringProfile = "stringFieldBoost", + search = "purple", + searchFields = "id,name,description,category,ownerId", + searchMode = "any", + queryLanguage = "en-us", + speller = "lexicon", + select = "id,name,description,category,ownerId", + skip = 0, + top = 10, + semanticConfiguration = "testconfig", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 5000, + semanticQuery = "find all purple", + answers = "extractive", + captions = "extractive", + queryRewrites = "generative", + vectorQueries = new object[] + { + new + { + vector = new object[] + { + 0F, + 1F, + 2F, + 3F, + 4F, + 5F, + 6F, + 7F, + 8F, + 9F + }, + kind = "vector", + k = 50, + fields = "vector22, vector1b", + oversampling = 20, + weight = 1F, + threshold = new + { + value = 0.984, + kind = "vectorSimilarity", + }, + filterOverride = "ownerId eq 'sam'", + } + }, + vectorFilterMode = "preFilter", + hybridSearch = new + { + maxTextRecallSize = 100, + countAndFacetMode = "countAllResults", + }, +}); +Response response = await client.SearchPostAsync("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> +This sample shows how to call SearchPostAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + count = true, + highlightPostTag = "", + highlightPreTag = "", + queryType = "semantic", + search = "how do clouds form", + semanticConfiguration = "my-semantic-config", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 780, +}); +Response response = await client.SearchPostAsync("myindex", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> + + + +This sample shows how to call SearchPost and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + count = true, + facets = new object[] + { + "ownerId", + "price,metric:sum,default:10" + }, + filter = "category eq 'purple' or category eq 'pink'", + highlight = "category", + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 100, + queryType = "semantic", + scoringStatistics = "global", + sessionId = "mysessionid", + scoringParameters = new object[] + { + "categoryTag:desiredCategoryValue" + }, + scoringProfile = "stringFieldBoost", + search = "purple", + searchFields = "id,name,description,category,ownerId", + searchMode = "any", + queryLanguage = "en-us", + speller = "lexicon", + select = "id,name,description,category,ownerId", + skip = 0, + top = 10, + semanticConfiguration = "testconfig", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 5000, + semanticQuery = "find all purple", + answers = "extractive", + captions = "extractive", + queryRewrites = "generative", + vectorQueries = new object[] + { + new + { + vector = new object[] + { + 0F, + 1F, + 2F, + 3F, + 4F, + 5F, + 6F, + 7F, + 8F, + 9F + }, + kind = "vector", + k = 50, + fields = "vector22, vector1b", + oversampling = 20, + weight = 1F, + threshold = new + { + value = 0.984, + kind = "vectorSimilarity", + }, + filterOverride = "ownerId eq 'sam'", + } + }, + vectorFilterMode = "preFilter", + hybridSearch = new + { + maxTextRecallSize = 100, + countAndFacetMode = "countAllResults", + }, +}); +Response response = client.SearchPost("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> +This sample shows how to call SearchPost and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + count = true, + highlightPostTag = "", + highlightPreTag = "", + queryType = "semantic", + search = "how do clouds form", + semanticConfiguration = "my-semantic-config", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 780, +}); +Response response = client.SearchPost("myindex", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); +]]> + + + +This sample shows how to call GetDocumentAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.GetDocumentAsync("preview-test", "1"); +]]> + + + +This sample shows how to call GetDocument. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.GetDocument("preview-test", "1"); +]]> + + + +This sample shows how to call GetDocumentAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.GetDocumentAsync("preview-test", "1", new string[] { "id", "description", "name", "category", "ownerId" }, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> + + + +This sample shows how to call GetDocument and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.GetDocument("preview-test", "1", new string[] { "id", "description", "name", "category", "ownerId" }, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> + + + +This sample shows how to call SuggestGetAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.SuggestGetAsync("myindex", "hote", "sg"); +]]> + + + +This sample shows how to call SuggestGet. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.SuggestGet("myindex", "hote", "sg"); +]]> + + + +This sample shows how to call SuggestGetAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.SuggestGetAsync("myindex", "hote", "sg", "rating gt 10", false, "", "", 80, new string[] { "search.score() desc", "rating desc" }, new string[] { "title" }, new string[] { "docId", "title", "description" }, 10, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); +]]> + + + +This sample shows how to call SuggestGet and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.SuggestGet("myindex", "hote", "sg", "rating gt 10", false, "", "", 80, new string[] { "search.score() desc", "rating desc" }, new string[] { "title" }, new string[] { "docId", "title", "description" }, 10, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); +]]> + + + +This sample shows how to call SuggestPostAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +SuggestOptions suggestOptions = null; +Response response = await client.SuggestPostAsync("preview-test", suggestOptions); +]]> + + + +This sample shows how to call SuggestPost. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +SuggestOptions suggestOptions = null; +Response response = client.SuggestPost("preview-test", suggestOptions); +]]> + + + +This sample shows how to call SuggestPostAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + filter = "ownerId eq 'sam' and id lt '15'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + orderby = "id desc", + search = "p", + searchFields = "category", + select = "id,name,category,ownerId", + suggesterName = "sg", + top = 10, +}); +Response response = await client.SuggestPostAsync("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); +]]> + + + +This sample shows how to call SuggestPost and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + filter = "ownerId eq 'sam' and id lt '15'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + orderby = "id desc", + search = "p", + searchFields = "category", + select = "id,name,category,ownerId", + suggesterName = "sg", + top = 10, +}); +Response response = client.SuggestPost("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); +]]> + + + +This sample shows how to call IndexAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +IndexBatch batch = new IndexBatch(new IndexAction[] +{ + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + 
}, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + } +}); +Response response = await client.IndexAsync("preview-test", batch); +]]> + + + +This sample shows how to call Index. 
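The generated sample below builds an IndexBatch of bare IndexAction entries; a shorter hedged sketch of the same mergeOrUpload operation with the typed IndexDocumentsBatch helpers is given here, where the field names ("id", "name") are illustrative rather than taken from a real index.

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

SearchClient client = new SearchClient(
    new Uri("https://myservice.search.windows.net"),
    "preview-test",
    new AzureKeyCredential("myApiKey"));

// Build a mergeOrUpload batch from loosely typed documents.
IndexDocumentsBatch<SearchDocument> batch = IndexDocumentsBatch.MergeOrUpload(new[]
{
    new SearchDocument { ["id"] = "1", ["name"] = "first" },
    new SearchDocument { ["id"] = "2", ["name"] = "second" },
});

Response<IndexDocumentsResult> response = await client.IndexDocumentsAsync(batch);
foreach (IndexingResult result in response.Value.Results)
{
    Console.WriteLine($"{result.Key}: succeeded={result.Succeeded} status={result.Status}");
}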
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +IndexBatch batch = new IndexBatch(new IndexAction[] +{ + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = 
IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + }, + new IndexAction + { + ActionType = IndexActionType.MergeOrUpload, + } +}); +Response response = client.Index("preview-test", batch); +]]> + + + +This sample shows how to call IndexAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + value = new object[] + { + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = 
"mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + } + }, +}); +Response response = await client.IndexAsync("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("key").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("statusCode").ToString()); +]]> + + + +This sample shows how to call Index and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + value = new object[] + { + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new 
Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + }, + new Dictionary + { + ["@search.action"] = "mergeOrUpload" + } + }, +}); +Response response = client.Index("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("key").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("statusCode").ToString()); +]]> + + + +This sample shows how to call AutocompleteGetAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.AutocompleteGetAsync("myindex", "washington medic", "sg"); +]]> + + + +This sample shows how to call AutocompleteGet. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.AutocompleteGet("myindex", "washington medic", "sg"); +]]> + + + +This sample shows how to call AutocompleteGetAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = await client.AutocompleteGetAsync("myindex", "washington medic", "sg", "oneTerm", null, false, "", "", 80, new string[] { "title", "description" }, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); +]]> + + + +This sample shows how to call AutocompleteGet and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +Response response = client.AutocompleteGet("myindex", "washington medic", "sg", "oneTerm", null, false, "", "", 80, new string[] { "title", "description" }, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); +]]> + + + +This sample shows how to call AutocompletePostAsync. 
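Before the AutocompletePost protocol samples below, a minimal sketch of the typed AutocompleteAsync counterpart; "myindex", the suggester "sg", and the search text mirror the AutocompleteGet samples above.

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

SearchClient client = new SearchClient(
    new Uri("https://myservice.search.windows.net"),
    "myindex",
    new AzureKeyCredential("myApiKey"));

AutocompleteOptions options = new AutocompleteOptions
{
    Mode = AutocompleteMode.OneTerm,
    Size = 10,
};

Response<AutocompleteResults> response =
    await client.AutocompleteAsync("washington medic", "sg", options);
foreach (AutocompleteItem item in response.Value.Results)
{
    Console.WriteLine($"{item.Text} ({item.QueryPlusText})");
}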
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +AutocompleteOptions autocompleteOptions = null; +Response response = await client.AutocompletePostAsync("preview-test", autocompleteOptions); +]]> + + + +This sample shows how to call AutocompletePost. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +AutocompleteOptions autocompleteOptions = null; +Response response = client.AutocompletePost("preview-test", autocompleteOptions); +]]> + + + +This sample shows how to call AutocompletePostAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + search = "p", + autocompleteMode = "oneTerm", + filter = "ownerId ne '1'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + searchFields = "category, ownerId", + suggesterName = "sg", + top = 10, +}); +Response response = await client.AutocompletePostAsync("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); +]]> + + + +This sample shows how to call AutocompletePost and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + +using RequestContent content = RequestContent.Create(new +{ + search = "p", + autocompleteMode = "oneTerm", + filter = "ownerId ne '1'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + searchFields = "category, ownerId", + suggesterName = "sg", + top = 10, +}); +Response response = client.AutocompletePost("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/Indexers.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Indexers.xml new file mode 100644 index 000000000000..a18e977822a5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Indexers.xml @@ -0,0 +1,973 @@ + + + + + +This sample shows how to call ResetAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.ResetAsync("myindexer"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Reset. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.Reset("myindexer"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call ResetDocsAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.ResetDocsAsync("myindexer"); +]]> + + + +This sample shows how to call ResetDocs. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.ResetDocs("myindexer"); +]]> + + + +This sample shows how to call ResetDocsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +using RequestContent content = RequestContent.Create(new +{ + documentKeys = new object[] + { + "1", + "2", + "3" + }, +}); +Response response = await client.ResetDocsAsync("myindexer", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call ResetDocs. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +using RequestContent content = RequestContent.Create(new +{ + documentKeys = new object[] + { + "1", + "2", + "3" + }, +}); +Response response = client.ResetDocs("myindexer", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call RunAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.RunAsync("myindexer"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Run. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.Run("myindexer"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreateOrUpdateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") +{ + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToOne, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + OutputFieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = await client.CreateOrUpdateAsync("myindexer", indexer); +]]> + + + +This sample shows how to call CreateOrUpdate. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") +{ + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToOne, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + OutputFieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = client.CreateOrUpdate("myindexer", indexer); +]]> + + + +This sample shows how to call CreateOrUpdateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + { + interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToOne", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", + }, + }, + ["fieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["outputFieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = await client.CreateOrUpdateAsync("myindexer", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call CreateOrUpdate and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + { + interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToOne", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", + }, + }, + ["fieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["outputFieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = client.CreateOrUpdate("myindexer", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.DeleteAsync("tempindexer"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.Delete("tempindexer"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetIndexerAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.GetIndexerAsync("myindexer"); +]]> + + + +This sample shows how to call GetIndexer. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.GetIndexer("myindexer"); +]]> + + + +This sample shows how to call GetIndexerAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.GetIndexerAsync("myindexer", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call GetIndexer and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.GetIndexer("myindexer", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call GetIndexersAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.GetIndexersAsync(); +]]> + + + +This sample shows how to call GetIndexers. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.GetIndexers(); +]]> + + + +This sample shows how to call GetIndexersAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.GetIndexersAsync("*", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call GetIndexers and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.GetIndexers("*", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call CreateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") +{ + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToMany, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + OutputFieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + Cache = new SearchIndexerCache + { + StorageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + EnableReprocessing = true, + }, +}; +Response response = await client.CreateAsync(indexer); +]]> + + + +This sample shows how to call Create. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") +{ + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToMany, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + OutputFieldMappings = {new FieldMapping("/document") + { + TargetFieldName = "name", + MappingFunction = new FieldMappingFunction("base64Encode"), + }}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + Cache = new SearchIndexerCache + { + StorageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + EnableReprocessing = true, + }, +}; +Response response = client.Create(indexer); +]]> + + + +This sample shows how to call CreateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + { + interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToMany", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", + }, + }, + ["fieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["outputFieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["cache"] = new + { + storageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + enableReprocessing = true, + } +}); +Response response = await client.CreateAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call Create and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + { + interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToMany", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", + }, + }, + ["fieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["outputFieldMappings"] = new object[] + { + new + { + sourceFieldName = "/document", + targetFieldName = "name", + mappingFunction = new + { + name = "base64Encode", + }, + } + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["cache"] = new + { + storageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + enableReprocessing = true, + } +}); +Response response = client.Create(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("dataSourceName").ToString()); +Console.WriteLine(result.GetProperty("targetIndexName").ToString()); +]]> + + + +This sample shows how to call GetStatusAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.GetStatusAsync("myindexer"); +]]> + + + +This sample shows how to call GetStatus. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.GetStatus("myindexer"); +]]> + + + +This sample shows how to call GetStatusAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = await client.GetStatusAsync("myindexer", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("errorMessage").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("statusCode").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("warnings")[0].GetProperty("message").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsProcessed").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsFailed").ToString()); +Console.WriteLine(result.GetProperty("limits").ToString()); +]]> + + + +This sample shows how to call GetStatus and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + +Response response = client.GetStatus("myindexer", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("errorMessage").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("statusCode").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("warnings")[0].GetProperty("message").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsProcessed").ToString()); +Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsFailed").ToString()); +Console.WriteLine(result.GetProperty("limits").ToString()); +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/Indexes.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Indexes.xml new file mode 100644 index 000000000000..c5fe44fcc80b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Indexes.xml @@ -0,0 +1,2747 @@ + + + + + +This sample shows how to call CreateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] +{ + new SearchField("id", SearchFieldDataType.String) + { + Key = true, + Sortable = true, + }, + new SearchField("vector1", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 20, + VectorSearchProfileName = "config1", + }, + new SearchField("vector1b", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector2", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector3", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector22", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector4", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 32, + VectorSearchProfileName = "config4", + }, + new SearchField("name", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("description", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.StandardLucene, + }, + new SearchField("category", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("ownerId", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("price", SearchFieldDataType.Double) + { + Retrievable = true, + Filterable = true, + Sortable = true, + Facetable = true, + } +}) +{ + ScoringProfiles = {new ScoringProfile("stringFieldBoost") + { + TextWeights = new TextWeights(new Dictionary + { + ["name"] = 3, + ["description"] = 1, + ["category"] = 2, + ["ownerId"] = 1 + }), + Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, + }}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields + { + TitleField = new SemanticField("category"), + ContentFields = {new SemanticField("description")}, + KeywordsFields = {new SemanticField("ownerId")}, + }) + { + FlightingOptIn = true, + }}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") + { + VectorizerName = "openai", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config2", "euclidean") + { + 
VectorizerName = "custom-web-api", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config3", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQC", + }, new VectorSearchProfile("config4", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQWithoutOriginals", + }}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Cosine, + }, + }, new HnswAlgorithmConfiguration("euclidean") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Euclidean, + }, + }, new HnswAlgorithmConfiguration("dotProduct") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.DotProduct, + }, + }}, + Vectorizers = {new AzureOpenAIVectorizer("openai") + { + Parameters = new AzureOpenAIVectorizerParameters + { + ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), + DeploymentName = "model", + ApiKey = "api-key", + ModelName = AzureOpenAIModelName.TextEmbedding3Large, + }, + }, new WebApiVectorizer("custom-web-api") + { + WebApiParameters = new WebApiVectorizerParameters + { + Url = new Uri("https://my-custom-endpoint.org/"), + HttpHeaders = + { + ["header1"] = "value1", + ["header2"] = "value2" + }, + HttpMethod = "POST", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + }, new AzureMachineLearningVectorizer("aml") + { + AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) + { + ResourceId = "aml resource id", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + Region = "aml region", + ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, + }, + }}, + Compressions = {new ScalarQuantizationCompression("mySQ8") + { + Parameters = new ScalarQuantizationParameters + { + QuantizedDataType = VectorSearchCompressionTarget.Int8, + }, + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQC") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQWithoutOriginals") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, + }, + TruncationDimension = 2, + }}, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = await client.CreateAsync(index); +]]> + + + +This sample shows how to call Create. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] +{ + new SearchField("id", SearchFieldDataType.String) + { + Key = true, + Sortable = true, + }, + new SearchField("vector1", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 20, + VectorSearchProfileName = "config1", + }, + new SearchField("vector1b", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector2", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector3", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector22", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector4", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 32, + VectorSearchProfileName = "config4", + }, + new SearchField("name", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("description", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.StandardLucene, + }, + new SearchField("category", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("ownerId", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("price", SearchFieldDataType.Double) + { + Retrievable = true, + Filterable = true, + Sortable = true, + Facetable = true, + } +}) +{ + ScoringProfiles = {new ScoringProfile("stringFieldBoost") + { + TextWeights = new TextWeights(new Dictionary + { + ["name"] = 3, + ["description"] = 1, + ["category"] = 2, + ["ownerId"] = 1 + }), + Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, + }}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields + { + TitleField = new SemanticField("category"), + ContentFields = {new SemanticField("description")}, + KeywordsFields = {new SemanticField("ownerId")}, + }) + { + FlightingOptIn = true, + }}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") + { + VectorizerName = "openai", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config2", "euclidean") + { + 
VectorizerName = "custom-web-api", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config3", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQC", + }, new VectorSearchProfile("config4", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQWithoutOriginals", + }}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Cosine, + }, + }, new HnswAlgorithmConfiguration("euclidean") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Euclidean, + }, + }, new HnswAlgorithmConfiguration("dotProduct") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.DotProduct, + }, + }}, + Vectorizers = {new AzureOpenAIVectorizer("openai") + { + Parameters = new AzureOpenAIVectorizerParameters + { + ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), + DeploymentName = "model", + ApiKey = "api-key", + ModelName = AzureOpenAIModelName.TextEmbedding3Large, + }, + }, new WebApiVectorizer("custom-web-api") + { + WebApiParameters = new WebApiVectorizerParameters + { + Url = new Uri("https://my-custom-endpoint.org/"), + HttpHeaders = + { + ["header1"] = "value1", + ["header2"] = "value2" + }, + HttpMethod = "POST", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + }, new AzureMachineLearningVectorizer("aml") + { + AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) + { + ResourceId = "aml resource id", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + Region = "aml region", + ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, + }, + }}, + Compressions = {new ScalarQuantizationCompression("mySQ8") + { + Parameters = new ScalarQuantizationParameters + { + QuantizedDataType = VectorSearchCompressionTarget.Int8, + }, + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQC") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQWithoutOriginals") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, + }, + TruncationDimension = 2, + }}, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = client.Create(index); +]]> + + + +This sample shows how to call CreateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { + new + { + name = "id", + type = "Edm.String", + key = true, + sortable = true, + }, + new + { + name = "vector1", + retrievable = true, + searchable = true, + dimensions = 20, + vectorSearchProfile = "config1", + }, + new + { + name = "vector1b", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector2", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector3", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector22", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector4", + retrievable = true, + searchable = true, + dimensions = 32, + vectorSearchProfile = "config4", + }, + new + { + name = "name", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "description", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "standard.lucene", + }, + new + { + name = "category", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "ownerId", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "price", + type = "Edm.Double", + retrievable = true, + filterable = true, + sortable = true, + facetable = true, + } + }, + ["scoringProfiles"] = new object[] + { + new + { + name = "stringFieldBoost", + text = new + { + weights = new + { + name = 3, + description = 1, + category = 2, + ownerId = 1, + }, + }, + functions = new object[] + { + new + { + tag = new + { + tagsParameter = "categoryTag", + }, + type = "tag", + fieldName = "category", + boost = 2, + } + }, + } + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { + "https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { + new + { + name = "sg", + searchMode = "analyzingInfixMatching", + sourceFields = new object[] + { + "category", + "ownerId" + }, + } + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { + new + { + name = "testconfig", + prioritizedFields = new + { + titleField = new + { + fieldName = "category", + }, + prioritizedContentFields = new object[] + { + new + { + fieldName = "description", + } + }, + prioritizedKeywordsFields = new object[] + { + new + { + fieldName = "ownerId", + } + }, + }, + flightingOptIn = true, + } + }, + }, + ["vectorSearch"] = new + { + profiles = new object[] + { + new + { + name = "config1", + algorithm = "cosine", + vectorizer = "openai", + 
compression = "mySQ8", + }, + new + { + name = "config2", + algorithm = "euclidean", + vectorizer = "custom-web-api", + compression = "mySQ8", + }, + new + { + name = "config3", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQC", + }, + new + { + name = "config4", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQWithoutOriginals", + } + }, + algorithms = new object[] + { + new + { + hnswParameters = new + { + metric = "cosine", + }, + name = "cosine", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "euclidean", + }, + name = "euclidean", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "dotProduct", + }, + name = "dotProduct", + kind = "hnsw", + } + }, + vectorizers = new object[] + { + new + { + azureOpenAIParameters = new + { + resourceUri = "https://test-sample.openai.azure.com/", + deploymentId = "model", + apiKey = "api-key", + modelName = "text-embedding-3-large", + }, + name = "openai", + kind = "azureOpenAI", + }, + new + { + customWebApiParameters = new + { + uri = "https://my-custom-endpoint.org/", + httpHeaders = new + { + header1 = "value1", + header2 = "value2", + }, + httpMethod = "POST", + timeout = "PT1M", + authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + name = "custom-web-api", + kind = "customWebApi", + }, + new + { + amlParameters = new + { + uri = "https://my-custom-endpoint.org/", + resourceId = "aml resource id", + timeout = "PT1M", + region = "aml region", + modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", + }, + name = "aml", + kind = "aml", + } + }, + compressions = new object[] + { + new + { + scalarQuantizationParameters = new + { + quantizedDataType = "int8", + }, + name = "mySQ8", + kind = "scalarQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQC", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQWithoutOriginals", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "discardOriginals", + }, + truncationDimension = 2, + } + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = await client.CreateAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +]]> + + + +This sample shows how to call Create and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { + new + { + name = "id", + type = "Edm.String", + key = true, + sortable = true, + }, + new + { + name = "vector1", + retrievable = true, + searchable = true, + dimensions = 20, + vectorSearchProfile = "config1", + }, + new + { + name = "vector1b", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector2", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector3", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector22", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector4", + retrievable = true, + searchable = true, + dimensions = 32, + vectorSearchProfile = "config4", + }, + new + { + name = "name", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "description", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "standard.lucene", + }, + new + { + name = "category", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "ownerId", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "price", + type = "Edm.Double", + retrievable = true, + filterable = true, + sortable = true, + facetable = true, + } + }, + ["scoringProfiles"] = new object[] + { + new + { + name = "stringFieldBoost", + text = new + { + weights = new + { + name = 3, + description = 1, + category = 2, + ownerId = 1, + }, + }, + functions = new object[] + { + new + { + tag = new + { + tagsParameter = "categoryTag", + }, + type = "tag", + fieldName = "category", + boost = 2, + } + }, + } + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { + "https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { + new + { + name = "sg", + searchMode = "analyzingInfixMatching", + sourceFields = new object[] + { + "category", + "ownerId" + }, + } + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { + new + { + name = "testconfig", + prioritizedFields = new + { + titleField = new + { + fieldName = "category", + }, + prioritizedContentFields = new object[] + { + new + { + fieldName = "description", + } + }, + prioritizedKeywordsFields = new object[] + { + new + { + fieldName = "ownerId", + } + }, + }, + flightingOptIn = true, + } + }, + }, + ["vectorSearch"] = new + { + profiles = new object[] + { + new + { + name = "config1", + algorithm = "cosine", + vectorizer = "openai", + 
compression = "mySQ8", + }, + new + { + name = "config2", + algorithm = "euclidean", + vectorizer = "custom-web-api", + compression = "mySQ8", + }, + new + { + name = "config3", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQC", + }, + new + { + name = "config4", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQWithoutOriginals", + } + }, + algorithms = new object[] + { + new + { + hnswParameters = new + { + metric = "cosine", + }, + name = "cosine", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "euclidean", + }, + name = "euclidean", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "dotProduct", + }, + name = "dotProduct", + kind = "hnsw", + } + }, + vectorizers = new object[] + { + new + { + azureOpenAIParameters = new + { + resourceUri = "https://test-sample.openai.azure.com/", + deploymentId = "model", + apiKey = "api-key", + modelName = "text-embedding-3-large", + }, + name = "openai", + kind = "azureOpenAI", + }, + new + { + customWebApiParameters = new + { + uri = "https://my-custom-endpoint.org/", + httpHeaders = new + { + header1 = "value1", + header2 = "value2", + }, + httpMethod = "POST", + timeout = "PT1M", + authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + name = "custom-web-api", + kind = "customWebApi", + }, + new + { + amlParameters = new + { + uri = "https://my-custom-endpoint.org/", + resourceId = "aml resource id", + timeout = "PT1M", + region = "aml region", + modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", + }, + name = "aml", + kind = "aml", + } + }, + compressions = new object[] + { + new + { + scalarQuantizationParameters = new + { + quantizedDataType = "int8", + }, + name = "mySQ8", + kind = "scalarQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQC", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQWithoutOriginals", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "discardOriginals", + }, + truncationDimension = 2, + } + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = client.Create(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +]]> + + + +This sample shows how to call CreateOrUpdateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] +{ + new SearchField("id", SearchFieldDataType.String) + { + Key = true, + Sortable = true, + }, + new SearchField("vector1", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 20, + VectorSearchProfileName = "config1", + }, + new SearchField("vector1b", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector2", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector3", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector22", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector4", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 32, + VectorSearchProfileName = "config4", + }, + new SearchField("name", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("description", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.StandardLucene, + }, + new SearchField("category", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("ownerId", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("price", SearchFieldDataType.Double) + { + Retrievable = true, + Filterable = true, + Sortable = true, + Facetable = true, + } +}) +{ + ScoringProfiles = {new ScoringProfile("stringFieldBoost") + { + TextWeights = new TextWeights(new Dictionary + { + ["name"] = 3, + ["description"] = 1, + ["category"] = 2, + ["ownerId"] = 1 + }), + Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, + }}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields + { + TitleField = new SemanticField("category"), + ContentFields = {new SemanticField("description")}, + KeywordsFields = {new SemanticField("ownerId")}, + }) + { + FlightingOptIn = true, + }}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") + { + VectorizerName = "openai", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config2", "euclidean") + { + 
VectorizerName = "custom-web-api", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config3", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQC", + }, new VectorSearchProfile("config4", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQWithoutOriginals", + }}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Cosine, + }, + }, new HnswAlgorithmConfiguration("euclidean") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Euclidean, + }, + }, new HnswAlgorithmConfiguration("dotProduct") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.DotProduct, + }, + }}, + Vectorizers = {new AzureOpenAIVectorizer("openai") + { + Parameters = new AzureOpenAIVectorizerParameters + { + ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), + DeploymentName = "model", + ApiKey = "api-key", + ModelName = AzureOpenAIModelName.TextEmbedding3Large, + }, + }, new WebApiVectorizer("custom-web-api") + { + WebApiParameters = new WebApiVectorizerParameters + { + Url = new Uri("https://my-custom-endpoint.org/"), + HttpHeaders = + { + ["header1"] = "value1", + ["header2"] = "value2" + }, + HttpMethod = "POST", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + }, new AzureMachineLearningVectorizer("aml") + { + AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) + { + ResourceId = "aml resource id", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + Region = "aml region", + ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, + }, + }}, + Compressions = {new ScalarQuantizationCompression("mySQ8") + { + Parameters = new ScalarQuantizationParameters + { + QuantizedDataType = VectorSearchCompressionTarget.Int8, + }, + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQC") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQWithoutOriginals") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, + }, + TruncationDimension = 2, + }}, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = await client.CreateOrUpdateAsync("temp-preview-test", index); +]]> + + + +This sample shows how to call CreateOrUpdate. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] +{ + new SearchField("id", SearchFieldDataType.String) + { + Key = true, + Sortable = true, + }, + new SearchField("vector1", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 20, + VectorSearchProfileName = "config1", + }, + new SearchField("vector1b", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector2", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector3", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 5, + VectorSearchProfileName = "config3", + }, + new SearchField("vector22", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 10, + VectorSearchProfileName = "config2", + }, + new SearchField("vector4", default) + { + Retrievable = true, + Searchable = true, + VectorSearchDimensions = 32, + VectorSearchProfileName = "config4", + }, + new SearchField("name", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("description", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.StandardLucene, + }, + new SearchField("category", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("ownerId", SearchFieldDataType.String) + { + Retrievable = true, + Searchable = true, + Filterable = true, + Sortable = true, + Facetable = true, + Analyzer = LexicalAnalyzerName.EnLucene, + }, + new SearchField("price", SearchFieldDataType.Double) + { + Retrievable = true, + Filterable = true, + Sortable = true, + Facetable = true, + } +}) +{ + ScoringProfiles = {new ScoringProfile("stringFieldBoost") + { + TextWeights = new TextWeights(new Dictionary + { + ["name"] = 3, + ["description"] = 1, + ["category"] = 2, + ["ownerId"] = 1 + }), + Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, + }}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields + { + TitleField = new SemanticField("category"), + ContentFields = {new SemanticField("description")}, + KeywordsFields = {new SemanticField("ownerId")}, + }) + { + FlightingOptIn = true, + }}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") + { + VectorizerName = "openai", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config2", "euclidean") + { + 
VectorizerName = "custom-web-api", + CompressionName = "mySQ8", + }, new VectorSearchProfile("config3", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQC", + }, new VectorSearchProfile("config4", "dotProduct") + { + VectorizerName = "custom-web-api", + CompressionName = "myBQWithoutOriginals", + }}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Cosine, + }, + }, new HnswAlgorithmConfiguration("euclidean") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.Euclidean, + }, + }, new HnswAlgorithmConfiguration("dotProduct") + { + Parameters = new HnswParameters + { + Metric = VectorSearchAlgorithmMetric.DotProduct, + }, + }}, + Vectorizers = {new AzureOpenAIVectorizer("openai") + { + Parameters = new AzureOpenAIVectorizerParameters + { + ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), + DeploymentName = "model", + ApiKey = "api-key", + ModelName = AzureOpenAIModelName.TextEmbedding3Large, + }, + }, new WebApiVectorizer("custom-web-api") + { + WebApiParameters = new WebApiVectorizerParameters + { + Url = new Uri("https://my-custom-endpoint.org/"), + HttpHeaders = + { + ["header1"] = "value1", + ["header2"] = "value2" + }, + HttpMethod = "POST", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + }, new AzureMachineLearningVectorizer("aml") + { + AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) + { + ResourceId = "aml resource id", + Timeout = XmlConvert.ToTimeSpan("PT1M"), + Region = "aml region", + ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, + }, + }}, + Compressions = {new ScalarQuantizationCompression("mySQ8") + { + Parameters = new ScalarQuantizationParameters + { + QuantizedDataType = VectorSearchCompressionTarget.Int8, + }, + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQC") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, + }, + TruncationDimension = 2, + }, new BinaryQuantizationCompression("myBQWithoutOriginals") + { + RescoringOptions = new RescoringOptions + { + EnableRescoring = true, + DefaultOversampling = 10, + RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, + }, + TruncationDimension = 2, + }}, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = client.CreateOrUpdate("temp-preview-test", index); +]]> + + + +This sample shows how to call CreateOrUpdateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { + new + { + name = "id", + type = "Edm.String", + key = true, + sortable = true, + }, + new + { + name = "vector1", + retrievable = true, + searchable = true, + dimensions = 20, + vectorSearchProfile = "config1", + }, + new + { + name = "vector1b", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector2", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector3", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector22", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector4", + retrievable = true, + searchable = true, + dimensions = 32, + vectorSearchProfile = "config4", + }, + new + { + name = "name", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "description", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "standard.lucene", + }, + new + { + name = "category", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "ownerId", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "price", + type = "Edm.Double", + retrievable = true, + filterable = true, + sortable = true, + facetable = true, + } + }, + ["scoringProfiles"] = new object[] + { + new + { + name = "stringFieldBoost", + text = new + { + weights = new + { + name = 3, + description = 1, + category = 2, + ownerId = 1, + }, + }, + functions = new object[] + { + new + { + tag = new + { + tagsParameter = "categoryTag", + }, + type = "tag", + fieldName = "category", + boost = 2, + } + }, + } + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { + "https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { + new + { + name = "sg", + searchMode = "analyzingInfixMatching", + sourceFields = new object[] + { + "category", + "ownerId" + }, + } + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { + new + { + name = "testconfig", + prioritizedFields = new + { + titleField = new + { + fieldName = "category", + }, + prioritizedContentFields = new object[] + { + new + { + fieldName = "description", + } + }, + prioritizedKeywordsFields = new object[] + { + new + { + fieldName = "ownerId", + } + }, + }, + flightingOptIn = true, + } + }, + }, + ["vectorSearch"] = new + { + profiles = new object[] + { + new + { + name = "config1", + algorithm = "cosine", + vectorizer = "openai", + 
compression = "mySQ8", + }, + new + { + name = "config2", + algorithm = "euclidean", + vectorizer = "custom-web-api", + compression = "mySQ8", + }, + new + { + name = "config3", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQC", + }, + new + { + name = "config4", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQWithoutOriginals", + } + }, + algorithms = new object[] + { + new + { + hnswParameters = new + { + metric = "cosine", + }, + name = "cosine", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "euclidean", + }, + name = "euclidean", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "dotProduct", + }, + name = "dotProduct", + kind = "hnsw", + } + }, + vectorizers = new object[] + { + new + { + azureOpenAIParameters = new + { + resourceUri = "https://test-sample.openai.azure.com/", + deploymentId = "model", + apiKey = "api-key", + modelName = "text-embedding-3-large", + }, + name = "openai", + kind = "azureOpenAI", + }, + new + { + customWebApiParameters = new + { + uri = "https://my-custom-endpoint.org/", + httpHeaders = new + { + header1 = "value1", + header2 = "value2", + }, + httpMethod = "POST", + timeout = "PT1M", + authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + name = "custom-web-api", + kind = "customWebApi", + }, + new + { + amlParameters = new + { + uri = "https://my-custom-endpoint.org/", + resourceId = "aml resource id", + timeout = "PT1M", + region = "aml region", + modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", + }, + name = "aml", + kind = "aml", + } + }, + compressions = new object[] + { + new + { + scalarQuantizationParameters = new + { + quantizedDataType = "int8", + }, + name = "mySQ8", + kind = "scalarQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQC", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQWithoutOriginals", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "discardOriginals", + }, + truncationDimension = 2, + } + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = await client.CreateOrUpdateAsync("temp-preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +]]> + + + +This sample shows how to call CreateOrUpdate and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { + new + { + name = "id", + type = "Edm.String", + key = true, + sortable = true, + }, + new + { + name = "vector1", + retrievable = true, + searchable = true, + dimensions = 20, + vectorSearchProfile = "config1", + }, + new + { + name = "vector1b", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector2", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector3", + retrievable = true, + searchable = true, + dimensions = 5, + vectorSearchProfile = "config3", + }, + new + { + name = "vector22", + retrievable = true, + searchable = true, + dimensions = 10, + vectorSearchProfile = "config2", + }, + new + { + name = "vector4", + retrievable = true, + searchable = true, + dimensions = 32, + vectorSearchProfile = "config4", + }, + new + { + name = "name", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "description", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "standard.lucene", + }, + new + { + name = "category", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "ownerId", + type = "Edm.String", + retrievable = true, + searchable = true, + filterable = true, + sortable = true, + facetable = true, + analyzer = "en.lucene", + }, + new + { + name = "price", + type = "Edm.Double", + retrievable = true, + filterable = true, + sortable = true, + facetable = true, + } + }, + ["scoringProfiles"] = new object[] + { + new + { + name = "stringFieldBoost", + text = new + { + weights = new + { + name = 3, + description = 1, + category = 2, + ownerId = 1, + }, + }, + functions = new object[] + { + new + { + tag = new + { + tagsParameter = "categoryTag", + }, + type = "tag", + fieldName = "category", + boost = 2, + } + }, + } + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { + "https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { + new + { + name = "sg", + searchMode = "analyzingInfixMatching", + sourceFields = new object[] + { + "category", + "ownerId" + }, + } + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { + new + { + name = "testconfig", + prioritizedFields = new + { + titleField = new + { + fieldName = "category", + }, + prioritizedContentFields = new object[] + { + new + { + fieldName = "description", + } + }, + prioritizedKeywordsFields = new object[] + { + new + { + fieldName = "ownerId", + } + }, + }, + flightingOptIn = true, + } + }, + }, + ["vectorSearch"] = new + { + profiles = new object[] + { + new + { + name = "config1", + algorithm = "cosine", + vectorizer = "openai", + 
compression = "mySQ8", + }, + new + { + name = "config2", + algorithm = "euclidean", + vectorizer = "custom-web-api", + compression = "mySQ8", + }, + new + { + name = "config3", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQC", + }, + new + { + name = "config4", + algorithm = "dotProduct", + vectorizer = "custom-web-api", + compression = "myBQWithoutOriginals", + } + }, + algorithms = new object[] + { + new + { + hnswParameters = new + { + metric = "cosine", + }, + name = "cosine", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "euclidean", + }, + name = "euclidean", + kind = "hnsw", + }, + new + { + hnswParameters = new + { + metric = "dotProduct", + }, + name = "dotProduct", + kind = "hnsw", + } + }, + vectorizers = new object[] + { + new + { + azureOpenAIParameters = new + { + resourceUri = "https://test-sample.openai.azure.com/", + deploymentId = "model", + apiKey = "api-key", + modelName = "text-embedding-3-large", + }, + name = "openai", + kind = "azureOpenAI", + }, + new + { + customWebApiParameters = new + { + uri = "https://my-custom-endpoint.org/", + httpHeaders = new + { + header1 = "value1", + header2 = "value2", + }, + httpMethod = "POST", + timeout = "PT1M", + authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", + }, + name = "custom-web-api", + kind = "customWebApi", + }, + new + { + amlParameters = new + { + uri = "https://my-custom-endpoint.org/", + resourceId = "aml resource id", + timeout = "PT1M", + region = "aml region", + modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", + }, + name = "aml", + kind = "aml", + } + }, + compressions = new object[] + { + new + { + scalarQuantizationParameters = new + { + quantizedDataType = "int8", + }, + name = "mySQ8", + kind = "scalarQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQC", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "preserveOriginals", + }, + truncationDimension = 2, + }, + new + { + name = "myBQWithoutOriginals", + kind = "binaryQuantization", + rescoringOptions = new + { + enableRescoring = true, + defaultOversampling = 10, + rescoreStorageMethod = "discardOriginals", + }, + truncationDimension = 2, + } + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = client.CreateOrUpdate("temp-preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = await client.DeleteAsync("temp-preview-test"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = client.Delete("temp-preview-test"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetIndexAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = await client.GetIndexAsync("preview-test"); +]]> + + + +This sample shows how to call GetIndex. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = client.GetIndex("preview-test"); +]]> + + + +This sample shows how to call GetIndexAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = await client.GetIndexAsync("preview-test", (RequestContext)null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +]]> + + + +This sample shows how to call GetIndex and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = client.GetIndex("preview-test", (RequestContext)null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +]]> + + + +This sample shows how to call GetStatisticsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = await client.GetStatisticsAsync("preview-test"); +]]> + + + +This sample shows how to call GetStatistics. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = client.GetStatistics("preview-test"); +]]> + + + +This sample shows how to call GetStatisticsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = await client.GetStatisticsAsync("preview-test", (RequestContext)null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("documentCount").ToString()); +Console.WriteLine(result.GetProperty("storageSize").ToString()); +Console.WriteLine(result.GetProperty("vectorIndexSize").ToString()); +]]> + + + +This sample shows how to call GetStatistics and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Response response = client.GetStatistics("preview-test", (RequestContext)null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("documentCount").ToString()); +Console.WriteLine(result.GetProperty("storageSize").ToString()); +Console.WriteLine(result.GetProperty("vectorIndexSize").ToString()); +]]> + + + +This sample shows how to call AnalyzeAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Search.Documents.Indexes.Models.AnalyzeTextOptions request = new Search.Documents.Indexes.Models.AnalyzeTextOptions("Text to analyze"); +Response response = await client.AnalyzeAsync("preview-test", request); +]]> + + + +This sample shows how to call Analyze. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +Search.Documents.Indexes.Models.AnalyzeTextOptions request = new Search.Documents.Indexes.Models.AnalyzeTextOptions("Text to analyze"); +Response response = client.Analyze("preview-test", request); +]]> + + + +This sample shows how to call AnalyzeAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +using RequestContent content = RequestContent.Create(new +{ + text = "Text to analyze", + analyzer = "ar.lucene", +}); +Response response = await client.AnalyzeAsync("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("token").ToString()); +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("startOffset").ToString()); +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("endOffset").ToString()); +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("position").ToString()); +]]> + + + +This sample shows how to call Analyze and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +using RequestContent content = RequestContent.Create(new +{ + text = "Text to analyze", + analyzer = "ar.lucene", +}); +Response response = client.Analyze("preview-test", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("token").ToString()); +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("startOffset").ToString()); +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("endOffset").ToString()); +Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("position").ToString()); +]]> + + + +This sample shows how to call GetIndexesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +await foreach (SearchIndex item in client.GetIndexesAsync()) +{ +} +]]> + + + +This sample shows how to call GetIndexes. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +foreach (SearchIndex item in client.GetIndexes()) +{ +} +]]> + + + +This sample shows how to call GetIndexesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +await foreach (BinaryData item in client.GetIndexesAsync((string)null, (RequestContext)null)) +{ + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +} +]]> + + + +This sample shows how to call GetIndexes and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + +foreach (BinaryData item in client.GetIndexes((string)null, (RequestContext)null)) +{ + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); +} +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/SearchClient.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/SearchClient.xml new file mode 100644 index 000000000000..0037aeef1966 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/SearchClient.xml @@ -0,0 +1,133 @@ + + + + + +This sample shows how to call GetServiceStatisticsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +Response response = await client.GetServiceStatisticsAsync(); +]]> + + + +This sample shows how to call GetServiceStatistics. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +Response response = client.GetServiceStatistics(); +]]> + + + +This sample shows how to call GetServiceStatisticsAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +Response response = await client.GetServiceStatisticsAsync(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("counters").GetProperty("aliasesCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("documentCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("indexesCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("indexersCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("dataSourcesCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("storageSize").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("synonymMaps").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("skillsetCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("vectorIndexSize").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("limits").ToString()); +]]> + + + +This sample shows how to call GetServiceStatistics and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +Response response = client.GetServiceStatistics(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("counters").GetProperty("aliasesCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("documentCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("indexesCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("indexersCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("dataSourcesCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("storageSize").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("synonymMaps").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("skillsetCount").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("counters").GetProperty("vectorIndexSize").GetProperty("usage").ToString()); +Console.WriteLine(result.GetProperty("limits").ToString()); +]]> + + + +This sample shows how to call GetIndexStatsSummaryAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +await foreach (IndexStatisticsSummary item in client.GetIndexStatsSummaryAsync()) +{ +} +]]> + + + +This sample shows how to call GetIndexStatsSummary. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +foreach (IndexStatisticsSummary item in client.GetIndexStatsSummary()) +{ +} +]]> + + + +This sample shows how to call GetIndexStatsSummaryAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +await foreach (BinaryData item in client.GetIndexStatsSummaryAsync(null)) +{ + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("documentCount").ToString()); + Console.WriteLine(result.GetProperty("storageSize").ToString()); +} +]]> + + + +This sample shows how to call GetIndexStatsSummary and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SearchClient client = new SearchClient(endpoint, credential); + +foreach (BinaryData item in client.GetIndexStatsSummary(null)) +{ + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("documentCount").ToString()); + Console.WriteLine(result.GetProperty("storageSize").ToString()); +} +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/Skillsets.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Skillsets.xml new file mode 100644 index 000000000000..92e95c98cc2f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/Skillsets.xml @@ -0,0 +1,447 @@ + + + + + +This sample shows how to call CreateOrUpdateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) +{ + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = await client.CreateOrUpdateAsync("tempskillset", skillset); +]]> + + + +This sample shows how to call CreateOrUpdate. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) +{ + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = client.CreateOrUpdate("tempskillset", skillset); +]]> + + + +This sample shows how to call CreateOrUpdateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = await client.CreateOrUpdateAsync("tempskillset", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call CreateOrUpdate and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = client.CreateOrUpdate("tempskillset", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = await client.DeleteAsync("tempskillset"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = client.Delete("tempskillset"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetSkillsetAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = await client.GetSkillsetAsync("myskillset"); +]]> + + + +This sample shows how to call GetSkillset. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = client.GetSkillset("myskillset"); +]]> + + + +This sample shows how to call GetSkillsetAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = await client.GetSkillsetAsync("myskillset", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetSkillset and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = client.GetSkillset("myskillset", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetSkillsetsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = await client.GetSkillsetsAsync(); +]]> + + + +This sample shows how to call GetSkillsets. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = client.GetSkillsets(); +]]> + + + +This sample shows how to call GetSkillsetsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = await client.GetSkillsetsAsync(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetSkillsets and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +Response response = client.GetSkillsets(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) +{ + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = await client.CreateAsync(skillset); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) +{ + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, +}; +Response response = client.Create(skillset); +]]> + + + +This sample shows how to call CreateAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = await client.CreateAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call Create and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } +}); +Response response = client.Create(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call ResetSkillsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +ResetSkillsOptions resetSkillsOptions = null; +Response response = await client.ResetSkillsAsync("myskillset", resetSkillsOptions); +]]> + + + +This sample shows how to call ResetSkills. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +ResetSkillsOptions resetSkillsOptions = null; +Response response = client.ResetSkills("myskillset", resetSkillsOptions); +]]> + + + +This sample shows how to call ResetSkillsAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +using RequestContent content = RequestContent.Create(new +{ + skillNames = new object[] + { + "skill2", + "skill3", + "skill4" + }, +}); +Response response = await client.ResetSkillsAsync("myskillset", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call ResetSkills. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + +using RequestContent content = RequestContent.Create(new +{ + skillNames = new object[] + { + "skill2", + "skill3", + "skill4" + }, +}); +Response response = client.ResetSkills("myskillset", content); + +Console.WriteLine(response.Status); +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Docs/SynonymMaps.xml b/sdk/search/Azure.Search.Documents/src/Generated/Docs/SynonymMaps.xml new file mode 100644 index 000000000000..031f8295364f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Docs/SynonymMaps.xml @@ -0,0 +1,367 @@ + + + + + +This sample shows how to call CreateOrUpdateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +SynonymMap synonymMap = new SynonymMap("mysynonymmap", "United States, United States of America, USA\nWashington, Wash. => WA") +{ + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = await client.CreateOrUpdateAsync("mysynonymmap", synonymMap); +]]> + + + +This sample shows how to call CreateOrUpdate. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +SynonymMap synonymMap = new SynonymMap("mysynonymmap", "United States, United States of America, USA\nWashington, Wash. => WA") +{ + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = client.CreateOrUpdate("mysynonymmap", synonymMap); +]]> + + + +This sample shows how to call CreateOrUpdateAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "mysynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. 
=> WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = await client.CreateOrUpdateAsync("mysynonymmap", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("synonyms").ToString()); +]]> + + + +This sample shows how to call CreateOrUpdate and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "mysynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. => WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = client.CreateOrUpdate("mysynonymmap", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("synonyms").ToString()); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = await client.DeleteAsync("tempsynonymmap"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = client.Delete("tempsynonymmap"); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetSynonymMapAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = await client.GetSynonymMapAsync("mysynonymmap"); +]]> + + + +This sample shows how to call GetSynonymMap. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = client.GetSynonymMap("mysynonymmap"); +]]> + + + +This sample shows how to call GetSynonymMapAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = await client.GetSynonymMapAsync("mysynonymmap", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("synonyms").ToString()); +]]> + + + +This sample shows how to call GetSynonymMap and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = client.GetSynonymMap("mysynonymmap", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("synonyms").ToString()); +]]> + + + +This sample shows how to call GetSynonymMapsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = await client.GetSynonymMapsAsync(); +]]> + + + +This sample shows how to call GetSynonymMaps. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = client.GetSynonymMaps(); +]]> + + + +This sample shows how to call GetSynonymMapsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = await client.GetSynonymMapsAsync(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("synonyms").ToString()); +]]> + + + +This sample shows how to call GetSynonymMaps and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +Response response = client.GetSynonymMaps(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("value")[0].GetProperty("synonyms").ToString()); +]]> + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +SynonymMap synonymMap = new SynonymMap("tempsynonymmap", "United States, United States of America, USA\nWashington, Wash. 
=> WA") +{ + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = await client.CreateAsync(synonymMap); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +SynonymMap synonymMap = new SynonymMap("tempsynonymmap", "United States, United States of America, USA\nWashington, Wash. => WA") +{ + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", +}; +Response response = client.Create(synonymMap); +]]> + + + +This sample shows how to call CreateAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempsynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. => WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = await client.CreateAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("synonyms").ToString()); +]]> + + + +This sample shows how to call Create and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + +using RequestContent content = RequestContent.Create(new Dictionary +{ + ["name"] = "tempsynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. 
=> WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" +}); +Response response = client.Create(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("format").ToString()); +Console.WriteLine(result.GetProperty("synonyms").ToString()); +]]> + + + \ No newline at end of file diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DocumentDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentDebugInfo.Serialization.cs new file mode 100644 index 000000000000..7af28b927681 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentDebugInfo.Serialization.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DocumentDebugInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentDebugInfo)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(Semantic)) + { + writer.WritePropertyName("semantic"u8); + writer.WriteObjectValue(Semantic, options); + } + if (options.Format != "W" && Optional.IsDefined(Vectors)) + { + writer.WritePropertyName("vectors"u8); + writer.WriteObjectValue(Vectors, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + DocumentDebugInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentDebugInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentDebugInfo(document.RootElement, options); + } + + internal static DocumentDebugInfo DeserializeDocumentDebugInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SemanticDebugInfo semantic = default; + VectorsDebugInfo vectors = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("semantic"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + semantic = SemanticDebugInfo.DeserializeSemanticDebugInfo(property.Value, options); + continue; + } + if (property.NameEquals("vectors"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + vectors = VectorsDebugInfo.DeserializeVectorsDebugInfo(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DocumentDebugInfo(semantic, vectors, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DocumentDebugInfo)} does not support writing '{options.Format}' format."); + } + } + + DocumentDebugInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDocumentDebugInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentDebugInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DocumentDebugInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDocumentDebugInfo(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DocumentDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentDebugInfo.cs new file mode 100644 index 000000000000..b6c3771ec857 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentDebugInfo.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains debugging information that can be used to further explore your search + /// results. + /// + public partial class DocumentDebugInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal DocumentDebugInfo() + { + } + + /// Initializes a new instance of . + /// Contains debugging information specific to semantic ranking requests. + /// Contains debugging information specific to vector and hybrid search. + /// Keeps track of any properties unknown to the library. + internal DocumentDebugInfo(SemanticDebugInfo semantic, VectorsDebugInfo vectors, IDictionary serializedAdditionalRawData) + { + Semantic = semantic; + Vectors = vectors; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Contains debugging information specific to semantic ranking requests. + public SemanticDebugInfo Semantic { get; } + /// Contains debugging information specific to vector and hybrid search. + public VectorsDebugInfo Vectors { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DocumentExtractionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentExtractionSkill.Serialization.cs new file mode 100644 index 000000000000..73f0610a931d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentExtractionSkill.Serialization.cs @@ -0,0 +1,243 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DocumentExtractionSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentExtractionSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(ParsingMode)) + { + writer.WritePropertyName("parsingMode"u8); + writer.WriteStringValue(ParsingMode); + } + if (Optional.IsDefined(DataToExtract)) + { + writer.WritePropertyName("dataToExtract"u8); + writer.WriteStringValue(DataToExtract); + } + if (Optional.IsCollectionDefined(Configuration)) + { + writer.WritePropertyName("configuration"u8); + writer.WriteStartObject(); + foreach (var item in Configuration) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + } + + DocumentExtractionSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentExtractionSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentExtractionSkill(document.RootElement, options); + } + + internal static DocumentExtractionSkill DeserializeDocumentExtractionSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string parsingMode = default; + string dataToExtract = default; + IDictionary configuration = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("parsingMode"u8)) + { + parsingMode = property.Value.GetString(); + continue; + } + if (property.NameEquals("dataToExtract"u8)) + { + dataToExtract = property.Value.GetString(); + continue; + } + if (property.NameEquals("configuration"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property0.Name, null); + } + else + { + dictionary.Add(property0.Name, BinaryData.FromString(property0.Value.GetRawText())); + } + } + configuration = dictionary; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); 
+ continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DocumentExtractionSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + parsingMode, + dataToExtract, + configuration ?? new ChangeTrackingDictionary()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DocumentExtractionSkill)} does not support writing '{options.Format}' format."); + } + } + + DocumentExtractionSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDocumentExtractionSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentExtractionSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new DocumentExtractionSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDocumentExtractionSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DocumentExtractionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentExtractionSkill.cs new file mode 100644 index 000000000000..5780ccb90cb7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentExtractionSkill.cs @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A skill that extracts content from a file within the enrichment pipeline. + public partial class DocumentExtractionSkill : SearchIndexerSkill + { + /// Initializes a new instance of . 
+ /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public DocumentExtractionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Util.DocumentExtractionSkill"; + Configuration = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// The parsingMode for the skill. Will be set to 'default' if not defined. + /// The type of data to be extracted for the skill. Will be set to 'contentAndMetadata' if not defined. + /// A dictionary of configurations for the skill. + internal DocumentExtractionSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string parsingMode, string dataToExtract, IDictionary configuration) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + ParsingMode = parsingMode; + DataToExtract = dataToExtract; + Configuration = configuration; + } + + /// Initializes a new instance of for deserialization. + internal DocumentExtractionSkill() + { + } + + /// The parsingMode for the skill. Will be set to 'default' if not defined. + public string ParsingMode { get; set; } + /// The type of data to be extracted for the skill. Will be set to 'contentAndMetadata' if not defined. + public string DataToExtract { get; set; } + /// + /// A dictionary of configurations for the skill. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// + /// + public IDictionary Configuration { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkill.Serialization.cs new file mode 100644 index 000000000000..3b3d1bc6ac12 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkill.Serialization.cs @@ -0,0 +1,205 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class DocumentIntelligenceLayoutSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentIntelligenceLayoutSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(OutputMode)) + { + writer.WritePropertyName("outputMode"u8); + writer.WriteStringValue(OutputMode.Value.ToString()); + } + if (Optional.IsDefined(MarkdownHeaderDepth)) + { + writer.WritePropertyName("markdownHeaderDepth"u8); + writer.WriteStringValue(MarkdownHeaderDepth.Value.ToString()); + } + } + + DocumentIntelligenceLayoutSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentIntelligenceLayoutSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentIntelligenceLayoutSkill(document.RootElement, options); + } + + internal static DocumentIntelligenceLayoutSkill DeserializeDocumentIntelligenceLayoutSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DocumentIntelligenceLayoutSkillOutputMode? outputMode = default; + DocumentIntelligenceLayoutSkillMarkdownHeaderDepth? 
markdownHeaderDepth = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("outputMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + outputMode = new DocumentIntelligenceLayoutSkillOutputMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("markdownHeaderDepth"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + markdownHeaderDepth = new DocumentIntelligenceLayoutSkillMarkdownHeaderDepth(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DocumentIntelligenceLayoutSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + outputMode, + markdownHeaderDepth); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DocumentIntelligenceLayoutSkill)} does not support writing '{options.Format}' format."); + } + } + + DocumentIntelligenceLayoutSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDocumentIntelligenceLayoutSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentIntelligenceLayoutSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
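// A sketch of the wire payload the reader above accepts, assuming the "J" format; the
// @odata.type value matches the discriminator assigned in the skill's constructor, while the
// field names and the "h3" header depth are illustrative. Unrecognized properties are captured
// in the additional-raw-data dictionary when options.Format != "W", exactly as the loop above shows.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents; // namespace as declared in this diff

internal static class DocumentIntelligenceLayoutSkillWireExample
{
    public static DocumentIntelligenceLayoutSkill FromWireJson()
    {
        string json = """
        {
          "@odata.type": "#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill",
          "context": "/document",
          "inputs":  [ { "name": "file_data", "source": "/document/file_data" } ],
          "outputs": [ { "name": "markdown_document", "targetName": "markdown_document" } ],
          "outputMode": "oneToMany",
          "markdownHeaderDepth": "h3"
        }
        """;

        return ModelReaderWriter.Read<DocumentIntelligenceLayoutSkill>(
            BinaryData.FromString(json), new ModelReaderWriterOptions("J"));
    }
}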
+ internal static new DocumentIntelligenceLayoutSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDocumentIntelligenceLayoutSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkill.cs new file mode 100644 index 000000000000..11e7e6063060 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkill.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A skill that extracts content and layout information (as markdown), via Azure + /// AI Services, from files within the enrichment pipeline. + /// + public partial class DocumentIntelligenceLayoutSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public DocumentIntelligenceLayoutSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. + /// The depth of headers in the markdown output. Default is h6. + internal DocumentIntelligenceLayoutSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, DocumentIntelligenceLayoutSkillOutputMode? outputMode, DocumentIntelligenceLayoutSkillMarkdownHeaderDepth? 
markdownHeaderDepth) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + OutputMode = outputMode; + MarkdownHeaderDepth = markdownHeaderDepth; + } + + /// Initializes a new instance of for deserialization. + internal DocumentIntelligenceLayoutSkill() + { + } + + /// Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. + public DocumentIntelligenceLayoutSkillOutputMode? OutputMode { get; set; } + /// The depth of headers in the markdown output. Default is h6. + public DocumentIntelligenceLayoutSkillMarkdownHeaderDepth? MarkdownHeaderDepth { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkillMarkdownHeaderDepth.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkillMarkdownHeaderDepth.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkillMarkdownHeaderDepth.cs rename to sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkillMarkdownHeaderDepth.cs index b6b8060a33cf..193a33b01040 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkillMarkdownHeaderDepth.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkillMarkdownHeaderDepth.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The depth of headers in the markdown output. Default is h6. public readonly partial struct DocumentIntelligenceLayoutSkillMarkdownHeaderDepth : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkillOutputMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkillOutputMode.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkillOutputMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkillOutputMode.cs index 50559c358b55..c5d470a674b7 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkillOutputMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/DocumentIntelligenceLayoutSkillOutputMode.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. public readonly partial struct DocumentIntelligenceLayoutSkillOutputMode : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Documents.cs b/sdk/search/Azure.Search.Documents/src/Generated/Documents.cs new file mode 100644 index 000000000000..af3801fb2d00 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Documents.cs @@ -0,0 +1,2500 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.Search.Documents.Models; + +namespace Azure.Search.Documents +{ + // Data plane generated sub-client. + /// The Documents sub-client. 
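// An illustrative configuration of the layout skill declared above. OutputMode and
// MarkdownHeaderDepth are extensible enums, so the string-based constructors (the same ones
// the deserializer uses) are shown here; "oneToMany" and "h6" are the documented defaults and
// "h3" is just an example. Field-mapping entry types and namespaces are assumed as in this PR.
using Azure.Search.Documents;                // namespace as declared in this diff
using Azure.Search.Documents.Indexes.Models; // assumed home of the field-mapping entries

internal static class DocumentIntelligenceLayoutSkillExample
{
    public static DocumentIntelligenceLayoutSkill CreateSkill() =>
        new DocumentIntelligenceLayoutSkill(
            inputs: new[] { new InputFieldMappingEntry("file_data") { Source = "/document/file_data" } },
            outputs: new[] { new OutputFieldMappingEntry("markdown_document") })
        {
            OutputMode = new DocumentIntelligenceLayoutSkillOutputMode("oneToMany"),
            MarkdownHeaderDepth = new DocumentIntelligenceLayoutSkillMarkdownHeaderDepth("h3"),
        };
}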
+ public partial class Documents + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of Documents for mocking. + protected Documents() + { + } + + /// Initializes a new instance of Documents. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// Service host. + /// The API version to use for this operation. + internal Documents(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _apiVersion = apiVersion; + } + + /// Queries the number of documents in the index. + /// The name of the index. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> CountAsync(string indexName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CountAsync(indexName, context).ConfigureAwait(false); + return Response.FromValue(response.Content.ToObjectFromJson(), response); + } + + /// Queries the number of documents in the index. + /// The name of the index. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response Count(string indexName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Count(indexName, context); + return Response.FromValue(response.Content.ToObjectFromJson(), response); + } + + /// + /// [Protocol Method] Queries the number of documents in the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
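// A sketch of the convenience Count overload defined above, assuming a Documents sub-client
// handed in from its parent client. The generic argument on the returned Response<T>
// (stripped in this diff) is inferred as long from the count payload.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents; // namespace as declared in this diff

internal static class DocumentsCountExample
{
    public static async Task PrintCountAsync(Documents documents, string indexName)
    {
        Response<long> count = await documents.CountAsync(indexName);
        Console.WriteLine($"'{indexName}' contains {count.Value} documents.");
    }
}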
+ /// + public virtual async Task CountAsync(string indexName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.Count"); + scope.Start(); + try + { + using HttpMessage message = CreateCountRequest(indexName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Queries the number of documents in the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Count(string indexName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.Count"); + scope.Start(); + try + { + using HttpMessage message = CreateCountRequest(indexName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Searches for documents in the index. + /// The name of the index. + /// + /// A full-text search query expression; Use "*" or omit this parameter to match + /// all documents. + /// + /// + /// A value that specifies whether to fetch the total count of results. Default is + /// false. Setting this value to true may have a performance impact. Note that the + /// count returned is an approximation. + /// + /// + /// The list of facet expressions to apply to the search query. Each facet + /// expression contains a field name, optionally followed by a comma-separated list + /// of name:value pairs. + /// + /// The OData $filter expression to apply to the search query. + /// + /// The list of field names to use for hit highlights. Only searchable fields can + /// be used for hit highlighting. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. Default is </em>. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. Default is <em>. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a search query in order for the query to be reported as a success. + /// This parameter can be useful for ensuring search availability even for services + /// with only one replica. The default is 100. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, and desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no OrderBy is + /// specified, the default sort order is descending by document match score. There + /// can be at most 32 $orderby clauses. 
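// The protocol Count overload above returns the raw Response; passing a RequestContext with
// ErrorOptions.NoThrow suppresses the exception on non-success status codes, which is the
// usual reason to drop down from the convenience overload. A sketch only; the payload type
// is inferred as long, as above.
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents; // namespace as declared in this diff

internal static class DocumentsCountProtocolExample
{
    public static async Task<long?> TryCountAsync(Documents documents, string indexName)
    {
        var context = new RequestContext { ErrorOptions = ErrorOptions.NoThrow };
        Response response = await documents.CountAsync(indexName, context);
        if (response.IsError)
        {
            return null; // response.Status and response.Content carry the error details
        }
        return response.Content.ToObjectFromJson<long>();
    }
}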
+ /// + /// + /// A value that specifies the syntax of the search query. The default is 'simple'. + /// Use 'full' if your query uses the Lucene query syntax. + /// + /// + /// The list of parameter values to be used in scoring functions (for example, + /// referencePointParameter) using the format name-values. For example, if the + /// scoring profile defines a function with a parameter called 'mylocation' the + /// parameter string would be "mylocation--122.2,44.8" (without the quotes). + /// + /// + /// The name of a scoring profile to evaluate match scores for matching documents + /// in order to sort the results. + /// + /// + /// The list of field names to which to scope the full-text search. When using + /// fielded search (fieldName:searchExpression) in a full Lucene query, the field + /// names of each fielded search expression take precedence over any field names + /// listed in this parameter. + /// + /// + /// A value that specifies whether any or all of the search terms must be matched + /// in order to count the document as a match. + /// + /// + /// A value that specifies whether we want to calculate scoring statistics (such as + /// document frequency) globally for more consistent scoring, or locally, for lower + /// latency. + /// + /// + /// A value to be used to create a sticky session, which can help to get more + /// consistent results. As long as the same sessionId is used, a best-effort + /// attempt will be made to target the same replica set. Be wary that reusing the + /// same sessionID values repeatedly can interfere with the load balancing of the + /// requests across replicas and adversely affect the performance of the search + /// service. The value used as sessionId cannot start with a '_' character. + /// + /// + /// The list of fields to retrieve. If unspecified, all fields marked as + /// retrievable in the schema are included. + /// + /// + /// The number of search results to skip. This value cannot be greater than + /// 100,000. If you need to scan documents in sequence, but cannot use $skip due to + /// this limitation, consider using $orderby on a totally-ordered key and $filter + /// with a range query instead. + /// + /// + /// The number of search results to retrieve. This can be used in conjunction with + /// $skip to implement client-side paging of search results. If results are + /// truncated due to server-side paging, the response will include a continuation + /// token that can be used to issue another Search request for the next page of + /// results. + /// + /// + /// The name of the semantic configuration that lists which fields should be used + /// for semantic ranking, captions, highlights, and answers + /// + /// + /// Allows the user to choose whether a semantic call should fail completely, or to + /// return partial results (default). + /// + /// + /// Allows the user to set an upper bound on the amount of time it takes for + /// semantic enrichment to finish processing before the request fails. + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns answers extracted from key passages in the highest ranked documents. + /// The number of answers returned can be configured by appending the pipe + /// character `|` followed by the `count-<number of answers>` option after the + /// answers parameter value, such as `extractive|count-3`. Default count is 1. 
The + /// confidence threshold can be configured by appending the pipe character `|` + /// followed by the `threshold-<confidence threshold>` option after the answers + /// parameter value, such as `extractive|threshold-0.9`. Default threshold is 0.7. + /// The maximum character length of answers can be configured by appending the pipe + /// character '|' followed by the 'count-<number of maximum character length>', + /// such as 'extractive|maxcharlength-600'. + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns captions extracted from key passages in the highest ranked documents. + /// When Captions is set to `extractive`, highlighting is enabled by default, and + /// can be configured by appending the pipe character `|` followed by the + /// `highlight-<true/false>` option, such as `extractive|highlight-true`. Defaults + /// to `None`. The maximum character length of captions can be configured by + /// appending the pipe character '|' followed by the 'count-<number of maximum + /// character length>', such as 'extractive|maxcharlength-600'. + /// + /// + /// Allows setting a separate search query that will be solely used for semantic + /// reranking, semantic captions and semantic answers. Is useful for scenarios + /// where there is a need to use different queries between the base retrieval and + /// ranking phase, and the L2 semantic phase. + /// + /// + /// When QueryRewrites is set to `generative`, the query terms are sent to a + /// generate model which will produce 10 (default) rewrites to help increase the + /// recall of the request. The requested count can be configured by appending the + /// pipe character `|` followed by the `count-<number of rewrites>` option, such as + /// `generative|count-3`. Defaults to `None`. This parameter is only valid if the + /// query type is `semantic`. + /// + /// + /// Enables a debugging tool that can be used to further explore your search + /// results. + /// + /// The language of the query. + /// Improve search recall by spell-correcting individual search query terms. + /// The list of field names used for semantic ranking. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> SearchGetAsync(string indexName, string searchText = null, bool? includeTotalResultCount = null, IEnumerable facets = null, string filter = null, IEnumerable highlightFields = null, string highlightPostTag = null, string highlightPreTag = null, double? minimumCoverage = null, IEnumerable orderBy = null, SearchQueryType? queryType = null, IEnumerable scoringParameters = null, string scoringProfile = null, IEnumerable searchFields = null, SearchMode? searchMode = null, ScoringStatistics? scoringStatistics = null, string sessionId = null, IEnumerable select = null, int? skip = null, int? top = null, string semanticConfiguration = null, SemanticErrorMode? semanticErrorHandling = null, int? semanticMaxWaitInMilliseconds = null, QueryAnswerType? answers = null, QueryCaptionType? captions = null, string semanticQuery = null, QueryRewritesType? queryRewrites = null, QueryDebugMode? debug = null, QueryLanguage? queryLanguage = null, QuerySpellerType? 
speller = null, IEnumerable semanticFields = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await SearchGetAsync(indexName, searchText, includeTotalResultCount, facets, filter, highlightFields, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, queryType?.ToSerialString(), scoringParameters, scoringProfile, searchFields, searchMode?.ToString(), scoringStatistics?.ToString(), sessionId, select, skip, top, semanticConfiguration, semanticErrorHandling?.ToString(), semanticMaxWaitInMilliseconds, answers?.ToString(), captions?.ToString(), semanticQuery, queryRewrites?.ToString(), debug?.ToString(), queryLanguage?.ToString(), speller?.ToString(), semanticFields, context).ConfigureAwait(false); + return Response.FromValue(SearchDocumentsResult.FromResponse(response), response); + } + + /// Searches for documents in the index. + /// The name of the index. + /// + /// A full-text search query expression; Use "*" or omit this parameter to match + /// all documents. + /// + /// + /// A value that specifies whether to fetch the total count of results. Default is + /// false. Setting this value to true may have a performance impact. Note that the + /// count returned is an approximation. + /// + /// + /// The list of facet expressions to apply to the search query. Each facet + /// expression contains a field name, optionally followed by a comma-separated list + /// of name:value pairs. + /// + /// The OData $filter expression to apply to the search query. + /// + /// The list of field names to use for hit highlights. Only searchable fields can + /// be used for hit highlighting. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. Default is </em>. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. Default is <em>. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a search query in order for the query to be reported as a success. + /// This parameter can be useful for ensuring search availability even for services + /// with only one replica. The default is 100. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, and desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no OrderBy is + /// specified, the default sort order is descending by document match score. There + /// can be at most 32 $orderby clauses. + /// + /// + /// A value that specifies the syntax of the search query. The default is 'simple'. + /// Use 'full' if your query uses the Lucene query syntax. + /// + /// + /// The list of parameter values to be used in scoring functions (for example, + /// referencePointParameter) using the format name-values. For example, if the + /// scoring profile defines a function with a parameter called 'mylocation' the + /// parameter string would be "mylocation--122.2,44.8" (without the quotes). + /// + /// + /// The name of a scoring profile to evaluate match scores for matching documents + /// in order to sort the results. 
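// A sketch of the GET-based convenience search defined above, using named arguments so the
// long optional parameter list stays readable. Element types stripped from this diff (for
// example on 'select') are assumed to be string, and SearchDocumentsResult is the strongly
// typed payload per the FromResponse call above; index and field names are examples.
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;        // namespace as declared in this diff
using Azure.Search.Documents.Models; // imported by Documents.cs above

internal static class SearchGetExample
{
    public static async Task<SearchDocumentsResult> SearchLuxuryHotelsAsync(Documents documents)
    {
        Response<SearchDocumentsResult> result = await documents.SearchGetAsync(
            indexName: "hotels-index",
            searchText: "luxury",
            filter: "Rating ge 4",
            includeTotalResultCount: true,
            select: new[] { "HotelId", "HotelName", "Rating" },
            top: 10);
        return result.Value;
    }
}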
+ /// + /// + /// The list of field names to which to scope the full-text search. When using + /// fielded search (fieldName:searchExpression) in a full Lucene query, the field + /// names of each fielded search expression take precedence over any field names + /// listed in this parameter. + /// + /// + /// A value that specifies whether any or all of the search terms must be matched + /// in order to count the document as a match. + /// + /// + /// A value that specifies whether we want to calculate scoring statistics (such as + /// document frequency) globally for more consistent scoring, or locally, for lower + /// latency. + /// + /// + /// A value to be used to create a sticky session, which can help to get more + /// consistent results. As long as the same sessionId is used, a best-effort + /// attempt will be made to target the same replica set. Be wary that reusing the + /// same sessionID values repeatedly can interfere with the load balancing of the + /// requests across replicas and adversely affect the performance of the search + /// service. The value used as sessionId cannot start with a '_' character. + /// + /// + /// The list of fields to retrieve. If unspecified, all fields marked as + /// retrievable in the schema are included. + /// + /// + /// The number of search results to skip. This value cannot be greater than + /// 100,000. If you need to scan documents in sequence, but cannot use $skip due to + /// this limitation, consider using $orderby on a totally-ordered key and $filter + /// with a range query instead. + /// + /// + /// The number of search results to retrieve. This can be used in conjunction with + /// $skip to implement client-side paging of search results. If results are + /// truncated due to server-side paging, the response will include a continuation + /// token that can be used to issue another Search request for the next page of + /// results. + /// + /// + /// The name of the semantic configuration that lists which fields should be used + /// for semantic ranking, captions, highlights, and answers + /// + /// + /// Allows the user to choose whether a semantic call should fail completely, or to + /// return partial results (default). + /// + /// + /// Allows the user to set an upper bound on the amount of time it takes for + /// semantic enrichment to finish processing before the request fails. + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns answers extracted from key passages in the highest ranked documents. + /// The number of answers returned can be configured by appending the pipe + /// character `|` followed by the `count-<number of answers>` option after the + /// answers parameter value, such as `extractive|count-3`. Default count is 1. The + /// confidence threshold can be configured by appending the pipe character `|` + /// followed by the `threshold-<confidence threshold>` option after the answers + /// parameter value, such as `extractive|threshold-0.9`. Default threshold is 0.7. + /// The maximum character length of answers can be configured by appending the pipe + /// character '|' followed by the 'count-<number of maximum character length>', + /// such as 'extractive|maxcharlength-600'. + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns captions extracted from key passages in the highest ranked documents. 
+ /// When Captions is set to `extractive`, highlighting is enabled by default, and + /// can be configured by appending the pipe character `|` followed by the + /// `highlight-<true/false>` option, such as `extractive|highlight-true`. Defaults + /// to `None`. The maximum character length of captions can be configured by + /// appending the pipe character '|' followed by the 'count-<number of maximum + /// character length>', such as 'extractive|maxcharlength-600'. + /// + /// + /// Allows setting a separate search query that will be solely used for semantic + /// reranking, semantic captions and semantic answers. Is useful for scenarios + /// where there is a need to use different queries between the base retrieval and + /// ranking phase, and the L2 semantic phase. + /// + /// + /// When QueryRewrites is set to `generative`, the query terms are sent to a + /// generate model which will produce 10 (default) rewrites to help increase the + /// recall of the request. The requested count can be configured by appending the + /// pipe character `|` followed by the `count-<number of rewrites>` option, such as + /// `generative|count-3`. Defaults to `None`. This parameter is only valid if the + /// query type is `semantic`. + /// + /// + /// Enables a debugging tool that can be used to further explore your search + /// results. + /// + /// The language of the query. + /// Improve search recall by spell-correcting individual search query terms. + /// The list of field names used for semantic ranking. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response SearchGet(string indexName, string searchText = null, bool? includeTotalResultCount = null, IEnumerable facets = null, string filter = null, IEnumerable highlightFields = null, string highlightPostTag = null, string highlightPreTag = null, double? minimumCoverage = null, IEnumerable orderBy = null, SearchQueryType? queryType = null, IEnumerable scoringParameters = null, string scoringProfile = null, IEnumerable searchFields = null, SearchMode? searchMode = null, ScoringStatistics? scoringStatistics = null, string sessionId = null, IEnumerable select = null, int? skip = null, int? top = null, string semanticConfiguration = null, SemanticErrorMode? semanticErrorHandling = null, int? semanticMaxWaitInMilliseconds = null, QueryAnswerType? answers = null, QueryCaptionType? captions = null, string semanticQuery = null, QueryRewritesType? queryRewrites = null, QueryDebugMode? debug = null, QueryLanguage? queryLanguage = null, QuerySpellerType? 
speller = null, IEnumerable semanticFields = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = SearchGet(indexName, searchText, includeTotalResultCount, facets, filter, highlightFields, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, queryType?.ToSerialString(), scoringParameters, scoringProfile, searchFields, searchMode?.ToString(), scoringStatistics?.ToString(), sessionId, select, skip, top, semanticConfiguration, semanticErrorHandling?.ToString(), semanticMaxWaitInMilliseconds, answers?.ToString(), captions?.ToString(), semanticQuery, queryRewrites?.ToString(), debug?.ToString(), queryLanguage?.ToString(), speller?.ToString(), semanticFields, context); + return Response.FromValue(SearchDocumentsResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Searches for documents in the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// + /// A full-text search query expression; Use "*" or omit this parameter to match + /// all documents. + /// + /// + /// A value that specifies whether to fetch the total count of results. Default is + /// false. Setting this value to true may have a performance impact. Note that the + /// count returned is an approximation. + /// + /// + /// The list of facet expressions to apply to the search query. Each facet + /// expression contains a field name, optionally followed by a comma-separated list + /// of name:value pairs. + /// + /// The OData $filter expression to apply to the search query. + /// + /// The list of field names to use for hit highlights. Only searchable fields can + /// be used for hit highlighting. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. Default is </em>. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. Default is <em>. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a search query in order for the query to be reported as a success. + /// This parameter can be useful for ensuring search availability even for services + /// with only one replica. The default is 100. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, and desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no OrderBy is + /// specified, the default sort order is descending by document match score. There + /// can be at most 32 $orderby clauses. + /// + /// + /// A value that specifies the syntax of the search query. The default is 'simple'. + /// Use 'full' if your query uses the Lucene query syntax. Allowed values: "simple" | "full" | "semantic" + /// + /// + /// The list of parameter values to be used in scoring functions (for example, + /// referencePointParameter) using the format name-values. 
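// The synchronous SearchGet above takes the same parameters; this sketch exercises the
// hit-highlighting knobs documented in the parameter list (highlightFields,
// highlightPreTag/highlightPostTag, minimumCoverage). Index and field names are examples only.
using Azure;
using Azure.Search.Documents;        // namespace as declared in this diff
using Azure.Search.Documents.Models; // imported by Documents.cs above

internal static class SearchHighlightExample
{
    public static SearchDocumentsResult SearchWithHighlights(Documents documents)
    {
        Response<SearchDocumentsResult> result = documents.SearchGet(
            indexName: "hotels-index",
            searchText: "ocean view",
            highlightFields: new[] { "Description" },
            highlightPreTag: "<em>",
            highlightPostTag: "</em>",
            minimumCoverage: 100);
        return result.Value;
    }
}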
For example, if the + /// scoring profile defines a function with a parameter called 'mylocation' the + /// parameter string would be "mylocation--122.2,44.8" (without the quotes). + /// + /// + /// The name of a scoring profile to evaluate match scores for matching documents + /// in order to sort the results. + /// + /// + /// The list of field names to which to scope the full-text search. When using + /// fielded search (fieldName:searchExpression) in a full Lucene query, the field + /// names of each fielded search expression take precedence over any field names + /// listed in this parameter. + /// + /// + /// A value that specifies whether any or all of the search terms must be matched + /// in order to count the document as a match. Allowed values: "any" | "all" + /// + /// + /// A value that specifies whether we want to calculate scoring statistics (such as + /// document frequency) globally for more consistent scoring, or locally, for lower + /// latency. Allowed values: "local" | "global" + /// + /// + /// A value to be used to create a sticky session, which can help to get more + /// consistent results. As long as the same sessionId is used, a best-effort + /// attempt will be made to target the same replica set. Be wary that reusing the + /// same sessionID values repeatedly can interfere with the load balancing of the + /// requests across replicas and adversely affect the performance of the search + /// service. The value used as sessionId cannot start with a '_' character. + /// + /// + /// The list of fields to retrieve. If unspecified, all fields marked as + /// retrievable in the schema are included. + /// + /// + /// The number of search results to skip. This value cannot be greater than + /// 100,000. If you need to scan documents in sequence, but cannot use $skip due to + /// this limitation, consider using $orderby on a totally-ordered key and $filter + /// with a range query instead. + /// + /// + /// The number of search results to retrieve. This can be used in conjunction with + /// $skip to implement client-side paging of search results. If results are + /// truncated due to server-side paging, the response will include a continuation + /// token that can be used to issue another Search request for the next page of + /// results. + /// + /// + /// The name of the semantic configuration that lists which fields should be used + /// for semantic ranking, captions, highlights, and answers + /// + /// + /// Allows the user to choose whether a semantic call should fail completely, or to + /// return partial results (default). Allowed values: "partial" | "fail" + /// + /// + /// Allows the user to set an upper bound on the amount of time it takes for + /// semantic enrichment to finish processing before the request fails. + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns answers extracted from key passages in the highest ranked documents. + /// The number of answers returned can be configured by appending the pipe + /// character `|` followed by the `count-<number of answers>` option after the + /// answers parameter value, such as `extractive|count-3`. Default count is 1. The + /// confidence threshold can be configured by appending the pipe character `|` + /// followed by the `threshold-<confidence threshold>` option after the answers + /// parameter value, such as `extractive|threshold-0.9`. Default threshold is 0.7. 
+ /// The maximum character length of answers can be configured by appending the pipe + /// character '|' followed by the 'count-<number of maximum character length>', + /// such as 'extractive|maxcharlength-600'. Allowed values: "none" | "extractive" + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns captions extracted from key passages in the highest ranked documents. + /// When Captions is set to `extractive`, highlighting is enabled by default, and + /// can be configured by appending the pipe character `|` followed by the + /// `highlight-<true/false>` option, such as `extractive|highlight-true`. Defaults + /// to `None`. The maximum character length of captions can be configured by + /// appending the pipe character '|' followed by the 'count-<number of maximum + /// character length>', such as 'extractive|maxcharlength-600'. Allowed values: "none" | "extractive" + /// + /// + /// Allows setting a separate search query that will be solely used for semantic + /// reranking, semantic captions and semantic answers. Is useful for scenarios + /// where there is a need to use different queries between the base retrieval and + /// ranking phase, and the L2 semantic phase. + /// + /// + /// When QueryRewrites is set to `generative`, the query terms are sent to a + /// generate model which will produce 10 (default) rewrites to help increase the + /// recall of the request. The requested count can be configured by appending the + /// pipe character `|` followed by the `count-<number of rewrites>` option, such as + /// `generative|count-3`. Defaults to `None`. This parameter is only valid if the + /// query type is `semantic`. Allowed values: "none" | "generative" + /// + /// + /// Enables a debugging tool that can be used to further explore your search + /// results. Allowed values: "disabled" | "semantic" | "vector" | "queryRewrites" | "all" + /// + /// The language of the query. Allowed values: "none" | "en-us" | "en-gb" | "en-in" | "en-ca" | "en-au" | "fr-fr" | "fr-ca" | "de-de" | "es-es" | "es-mx" | "zh-cn" | "zh-tw" | "pt-br" | "pt-pt" | "it-it" | "ja-jp" | "ko-kr" | "ru-ru" | "cs-cz" | "nl-be" | "nl-nl" | "hu-hu" | "pl-pl" | "sv-se" | "tr-tr" | "hi-in" | "ar-sa" | "ar-eg" | "ar-ma" | "ar-kw" | "ar-jo" | "da-dk" | "no-no" | "bg-bg" | "hr-hr" | "hr-ba" | "ms-my" | "ms-bn" | "sl-sl" | "ta-in" | "vi-vn" | "el-gr" | "ro-ro" | "is-is" | "id-id" | "th-th" | "lt-lt" | "uk-ua" | "lv-lv" | "et-ee" | "ca-es" | "fi-fi" | "sr-ba" | "sr-me" | "sr-rs" | "sk-sk" | "nb-no" | "hy-am" | "bn-in" | "eu-es" | "gl-es" | "gu-in" | "he-il" | "ga-ie" | "kn-in" | "ml-in" | "mr-in" | "fa-ae" | "pa-in" | "te-in" | "ur-pk". + /// Improve search recall by spell-correcting individual search query terms. Allowed values: "none" | "lexicon". + /// The list of field names used for semantic ranking. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task SearchGetAsync(string indexName, string searchText, bool? includeTotalResultCount, IEnumerable facets, string filter, IEnumerable highlightFields, string highlightPostTag, string highlightPreTag, double? 
minimumCoverage, IEnumerable orderBy, string queryType, IEnumerable scoringParameters, string scoringProfile, IEnumerable searchFields, string searchMode, string scoringStatistics, string sessionId, IEnumerable select, int? skip, int? top, string semanticConfiguration, string semanticErrorHandling, int? semanticMaxWaitInMilliseconds, string answers, string captions, string semanticQuery, string queryRewrites, string debug, string queryLanguage, string speller, IEnumerable semanticFields, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SearchGet"); + scope.Start(); + try + { + using HttpMessage message = CreateSearchGetRequest(indexName, searchText, includeTotalResultCount, facets, filter, highlightFields, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, queryType, scoringParameters, scoringProfile, searchFields, searchMode, scoringStatistics, sessionId, select, skip, top, semanticConfiguration, semanticErrorHandling, semanticMaxWaitInMilliseconds, answers, captions, semanticQuery, queryRewrites, debug, queryLanguage, speller, semanticFields, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Searches for documents in the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// + /// A full-text search query expression; Use "*" or omit this parameter to match + /// all documents. + /// + /// + /// A value that specifies whether to fetch the total count of results. Default is + /// false. Setting this value to true may have a performance impact. Note that the + /// count returned is an approximation. + /// + /// + /// The list of facet expressions to apply to the search query. Each facet + /// expression contains a field name, optionally followed by a comma-separated list + /// of name:value pairs. + /// + /// The OData $filter expression to apply to the search query. + /// + /// The list of field names to use for hit highlights. Only searchable fields can + /// be used for hit highlighting. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. Default is </em>. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. Default is <em>. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a search query in order for the query to be reported as a success. + /// This parameter can be useful for ensuring search availability even for services + /// with only one replica. The default is 100. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, and desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no OrderBy is + /// specified, the default sort order is descending by document match score. 
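// The string-typed protocol overload above accepts raw values such as "semantic" or
// "extractive|count-3"; the convenience overloads map strongly typed enums onto those strings
// via ToString()/ToSerialString(). A sketch of a semantic query through the convenience
// overload, assuming SearchQueryType.Semantic, QueryAnswerType.Extractive and
// QueryCaptionType.Extractive exist in Azure.Search.Documents.Models as in the public surface.
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;        // namespace as declared in this diff
using Azure.Search.Documents.Models;

internal static class SemanticSearchExample
{
    public static async Task<SearchDocumentsResult> SemanticSearchAsync(Documents documents)
    {
        Response<SearchDocumentsResult> result = await documents.SearchGetAsync(
            indexName: "hotels-index",
            searchText: "walking distance to live music",
            queryType: SearchQueryType.Semantic,
            semanticConfiguration: "my-semantic-config",
            answers: QueryAnswerType.Extractive,
            captions: QueryCaptionType.Extractive);
        return result.Value;
    }
}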
There + /// can be at most 32 $orderby clauses. + /// + /// + /// A value that specifies the syntax of the search query. The default is 'simple'. + /// Use 'full' if your query uses the Lucene query syntax. Allowed values: "simple" | "full" | "semantic" + /// + /// + /// The list of parameter values to be used in scoring functions (for example, + /// referencePointParameter) using the format name-values. For example, if the + /// scoring profile defines a function with a parameter called 'mylocation' the + /// parameter string would be "mylocation--122.2,44.8" (without the quotes). + /// + /// + /// The name of a scoring profile to evaluate match scores for matching documents + /// in order to sort the results. + /// + /// + /// The list of field names to which to scope the full-text search. When using + /// fielded search (fieldName:searchExpression) in a full Lucene query, the field + /// names of each fielded search expression take precedence over any field names + /// listed in this parameter. + /// + /// + /// A value that specifies whether any or all of the search terms must be matched + /// in order to count the document as a match. Allowed values: "any" | "all" + /// + /// + /// A value that specifies whether we want to calculate scoring statistics (such as + /// document frequency) globally for more consistent scoring, or locally, for lower + /// latency. Allowed values: "local" | "global" + /// + /// + /// A value to be used to create a sticky session, which can help to get more + /// consistent results. As long as the same sessionId is used, a best-effort + /// attempt will be made to target the same replica set. Be wary that reusing the + /// same sessionID values repeatedly can interfere with the load balancing of the + /// requests across replicas and adversely affect the performance of the search + /// service. The value used as sessionId cannot start with a '_' character. + /// + /// + /// The list of fields to retrieve. If unspecified, all fields marked as + /// retrievable in the schema are included. + /// + /// + /// The number of search results to skip. This value cannot be greater than + /// 100,000. If you need to scan documents in sequence, but cannot use $skip due to + /// this limitation, consider using $orderby on a totally-ordered key and $filter + /// with a range query instead. + /// + /// + /// The number of search results to retrieve. This can be used in conjunction with + /// $skip to implement client-side paging of search results. If results are + /// truncated due to server-side paging, the response will include a continuation + /// token that can be used to issue another Search request for the next page of + /// results. + /// + /// + /// The name of the semantic configuration that lists which fields should be used + /// for semantic ranking, captions, highlights, and answers + /// + /// + /// Allows the user to choose whether a semantic call should fail completely, or to + /// return partial results (default). Allowed values: "partial" | "fail" + /// + /// + /// Allows the user to set an upper bound on the amount of time it takes for + /// semantic enrichment to finish processing before the request fails. + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns answers extracted from key passages in the highest ranked documents. 
+ /// The number of answers returned can be configured by appending the pipe + /// character `|` followed by the `count-<number of answers>` option after the + /// answers parameter value, such as `extractive|count-3`. Default count is 1. The + /// confidence threshold can be configured by appending the pipe character `|` + /// followed by the `threshold-<confidence threshold>` option after the answers + /// parameter value, such as `extractive|threshold-0.9`. Default threshold is 0.7. + /// The maximum character length of answers can be configured by appending the pipe + /// character '|' followed by the 'count-<number of maximum character length>', + /// such as 'extractive|maxcharlength-600'. Allowed values: "none" | "extractive" + /// + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns captions extracted from key passages in the highest ranked documents. + /// When Captions is set to `extractive`, highlighting is enabled by default, and + /// can be configured by appending the pipe character `|` followed by the + /// `highlight-<true/false>` option, such as `extractive|highlight-true`. Defaults + /// to `None`. The maximum character length of captions can be configured by + /// appending the pipe character '|' followed by the 'count-<number of maximum + /// character length>', such as 'extractive|maxcharlength-600'. Allowed values: "none" | "extractive" + /// + /// + /// Allows setting a separate search query that will be solely used for semantic + /// reranking, semantic captions and semantic answers. Is useful for scenarios + /// where there is a need to use different queries between the base retrieval and + /// ranking phase, and the L2 semantic phase. + /// + /// + /// When QueryRewrites is set to `generative`, the query terms are sent to a + /// generate model which will produce 10 (default) rewrites to help increase the + /// recall of the request. The requested count can be configured by appending the + /// pipe character `|` followed by the `count-<number of rewrites>` option, such as + /// `generative|count-3`. Defaults to `None`. This parameter is only valid if the + /// query type is `semantic`. Allowed values: "none" | "generative" + /// + /// + /// Enables a debugging tool that can be used to further explore your search + /// results. Allowed values: "disabled" | "semantic" | "vector" | "queryRewrites" | "all" + /// + /// The language of the query. Allowed values: "none" | "en-us" | "en-gb" | "en-in" | "en-ca" | "en-au" | "fr-fr" | "fr-ca" | "de-de" | "es-es" | "es-mx" | "zh-cn" | "zh-tw" | "pt-br" | "pt-pt" | "it-it" | "ja-jp" | "ko-kr" | "ru-ru" | "cs-cz" | "nl-be" | "nl-nl" | "hu-hu" | "pl-pl" | "sv-se" | "tr-tr" | "hi-in" | "ar-sa" | "ar-eg" | "ar-ma" | "ar-kw" | "ar-jo" | "da-dk" | "no-no" | "bg-bg" | "hr-hr" | "hr-ba" | "ms-my" | "ms-bn" | "sl-sl" | "ta-in" | "vi-vn" | "el-gr" | "ro-ro" | "is-is" | "id-id" | "th-th" | "lt-lt" | "uk-ua" | "lv-lv" | "et-ee" | "ca-es" | "fi-fi" | "sr-ba" | "sr-me" | "sr-rs" | "sk-sk" | "nb-no" | "hy-am" | "bn-in" | "eu-es" | "gl-es" | "gu-in" | "he-il" | "ga-ie" | "kn-in" | "ml-in" | "mr-in" | "fa-ae" | "pa-in" | "te-in" | "ur-pk". + /// Improve search recall by spell-correcting individual search query terms. Allowed values: "none" | "lexicon". + /// The list of field names used for semantic ranking. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response SearchGet(string indexName, string searchText, bool? includeTotalResultCount, IEnumerable facets, string filter, IEnumerable highlightFields, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable orderBy, string queryType, IEnumerable scoringParameters, string scoringProfile, IEnumerable searchFields, string searchMode, string scoringStatistics, string sessionId, IEnumerable select, int? skip, int? top, string semanticConfiguration, string semanticErrorHandling, int? semanticMaxWaitInMilliseconds, string answers, string captions, string semanticQuery, string queryRewrites, string debug, string queryLanguage, string speller, IEnumerable semanticFields, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SearchGet"); + scope.Start(); + try + { + using HttpMessage message = CreateSearchGetRequest(indexName, searchText, includeTotalResultCount, facets, filter, highlightFields, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, queryType, scoringParameters, scoringProfile, searchFields, searchMode, scoringStatistics, sessionId, select, skip, top, semanticConfiguration, semanticErrorHandling, semanticMaxWaitInMilliseconds, answers, captions, semanticQuery, queryRewrites, debug, queryLanguage, speller, semanticFields, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Searches for documents in the index. + /// The name of the index. + /// The definition of the Search request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> SearchPostAsync(string indexName, SearchOptions searchOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchOptions, nameof(searchOptions)); + + using RequestContent content = searchOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await SearchPostAsync(indexName, content, context).ConfigureAwait(false); + return Response.FromValue(SearchDocumentsResult.FromResponse(response), response); + } + + /// Searches for documents in the index. + /// The name of the index. + /// The definition of the Search request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response SearchPost(string indexName, SearchOptions searchOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchOptions, nameof(searchOptions)); + + using RequestContent content = searchOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = SearchPost(indexName, content, context); + return Response.FromValue(SearchDocumentsResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Searches for documents in the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task SearchPostAsync(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SearchPost"); + scope.Start(); + try + { + using HttpMessage message = CreateSearchPostRequest(indexName, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Searches for documents in the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response SearchPost(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SearchPost"); + scope.Start(); + try + { + using HttpMessage message = CreateSearchPostRequest(indexName, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves a document from the index. + /// The name of the index. + /// The key of the document to retrieve. + /// + /// List of field names to retrieve for the document; Any field not retrieved will + /// be missing from the returned document. + /// + /// The cancellation token to use. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetDocumentAsync(string indexName, string key, IEnumerable selectedFields = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNullOrEmpty(key, nameof(key)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetDocumentAsync(indexName, key, selectedFields, context).ConfigureAwait(false); + return Response.FromValue(LookupDocument.FromResponse(response), response); + } + + /// Retrieves a document from the index. + /// The name of the index. + /// The key of the document to retrieve. + /// + /// List of field names to retrieve for the document; Any field not retrieved will + /// be missing from the returned document. + /// + /// The cancellation token to use. 
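The SearchPost overloads above follow the usual Azure SDK convenience/protocol split. As a rough usage sketch (not part of the generated file), written for an async method and assuming this generated client is exposed as `client`, an index named "hotels-index" exists, and the JSON body shape matches the service's POST search contract:

// Protocol overload of SearchPost: the caller owns the raw JSON request body and reads the raw response.
// The body shape ({"search": ..., "top": ...}) is an assumption about the REST contract.
using RequestContent searchBody = RequestContent.Create("{ \"search\": \"beach access\", \"top\": 5 }");
Response rawSearch = await client.SearchPostAsync("hotels-index", searchBody, new RequestContext());

// Convenience overload: a generated SearchOptions model in, a typed SearchDocumentsResult out.
// An empty options object is assumed to be constructible and to mean "match all documents".
Response<SearchDocumentsResult> typedSearch = await client.SearchPostAsync("hotels-index", new SearchOptions());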
+ /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// + public virtual Response GetDocument(string indexName, string key, IEnumerable selectedFields = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNullOrEmpty(key, nameof(key)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetDocument(indexName, key, selectedFields, context); + return Response.FromValue(LookupDocument.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieves a document from the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The key of the document to retrieve. + /// + /// List of field names to retrieve for the document; Any field not retrieved will + /// be missing from the returned document. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetDocumentAsync(string indexName, string key, IEnumerable selectedFields, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNullOrEmpty(key, nameof(key)); + + using var scope = ClientDiagnostics.CreateScope("Documents.GetDocument"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDocumentRequest(indexName, key, selectedFields, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieves a document from the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The key of the document to retrieve. + /// + /// List of field names to retrieve for the document; Any field not retrieved will + /// be missing from the returned document. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetDocument(string indexName, string key, IEnumerable selectedFields, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNullOrEmpty(key, nameof(key)); + + using var scope = ClientDiagnostics.CreateScope("Documents.GetDocument"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDocumentRequest(indexName, key, selectedFields, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Suggests documents in the index that match the given partial query text. + /// The name of the index. 
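A short sketch of the GetDocument convenience overload defined above (illustration only; the client instance, index name, key, and field names are placeholders):

// Fetch one document by key, requesting only two fields; unrequested fields are omitted from the result.
Response<LookupDocument> doc = await client.GetDocumentAsync(
    "hotels-index",
    "hotel-42",
    selectedFields: new[] { "HotelName", "Rating" });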
+ /// + /// The search text to use to suggest documents. Must be at least 1 character, and + /// no more than 100 characters. + /// + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// An OData expression that filters the documents considered for suggestions. + /// + /// A value indicating whether to use fuzzy matching for the suggestions query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// suggestions queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a suggestions query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, or desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no $orderby is + /// specified, the default sort order is descending by document match score. There + /// can be at most 32 $orderby clauses. + /// + /// + /// The list of field names to search for the specified search text. Target fields + /// must be included in the specified suggester. + /// + /// + /// The list of fields to retrieve. If unspecified, only the key field will be + /// included in the results. + /// + /// + /// The number of suggestions to retrieve. The value must be a number between 1 and + /// 100. The default is 5. + /// + /// The cancellation token to use. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> SuggestGetAsync(string indexName, string searchText, string suggesterName, string filter = null, bool? useFuzzyMatching = null, string highlightPostTag = null, string highlightPreTag = null, double? minimumCoverage = null, IEnumerable orderBy = null, IEnumerable searchFields = null, IEnumerable select = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await SuggestGetAsync(indexName, searchText, suggesterName, filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, searchFields, select, top, context).ConfigureAwait(false); + return Response.FromValue(SuggestDocumentsResult.FromResponse(response), response); + } + + /// Suggests documents in the index that match the given partial query text. 
+ /// The name of the index. + /// + /// The search text to use to suggest documents. Must be at least 1 character, and + /// no more than 100 characters. + /// + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// An OData expression that filters the documents considered for suggestions. + /// + /// A value indicating whether to use fuzzy matching for the suggestions query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// suggestions queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a suggestions query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, or desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no $orderby is + /// specified, the default sort order is descending by document match score. There + /// can be at most 32 $orderby clauses. + /// + /// + /// The list of field names to search for the specified search text. Target fields + /// must be included in the specified suggester. + /// + /// + /// The list of fields to retrieve. If unspecified, only the key field will be + /// included in the results. + /// + /// + /// The number of suggestions to retrieve. The value must be a number between 1 and + /// 100. The default is 5. + /// + /// The cancellation token to use. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response SuggestGet(string indexName, string searchText, string suggesterName, string filter = null, bool? useFuzzyMatching = null, string highlightPostTag = null, string highlightPreTag = null, double? minimumCoverage = null, IEnumerable orderBy = null, IEnumerable searchFields = null, IEnumerable select = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = SuggestGet(indexName, searchText, suggesterName, filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, searchFields, select, top, context); + return Response.FromValue(SuggestDocumentsResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Suggests documents in the index that match the given partial query text. 
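To make the long parameter list above concrete, here is a hedged sketch of calling the SuggestGet convenience overload ("hotels-index" and the suggester name "sg" are placeholders for names defined on the index):

// Type-ahead suggestions for the partial term "sea", with fuzzy matching and at most 5 results.
Response<SuggestDocumentsResult> suggestions = await client.SuggestGetAsync(
    "hotels-index",
    searchText: "sea",
    suggesterName: "sg",
    useFuzzyMatching: true,
    top: 5);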
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// + /// The search text to use to suggest documents. Must be at least 1 character, and + /// no more than 100 characters. + /// + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// An OData expression that filters the documents considered for suggestions. + /// + /// A value indicating whether to use fuzzy matching for the suggestions query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// suggestions queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a suggestions query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, or desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no $orderby is + /// specified, the default sort order is descending by document match score. There + /// can be at most 32 $orderby clauses. + /// + /// + /// The list of field names to search for the specified search text. Target fields + /// must be included in the specified suggester. + /// + /// + /// The list of fields to retrieve. If unspecified, only the key field will be + /// included in the results. + /// + /// + /// The number of suggestions to retrieve. The value must be a number between 1 and + /// 100. The default is 5. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task SuggestGetAsync(string indexName, string searchText, string suggesterName, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable orderBy, IEnumerable searchFields, IEnumerable select, int? 
top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SuggestGet"); + scope.Start(); + try + { + using HttpMessage message = CreateSuggestGetRequest(indexName, searchText, suggesterName, filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, searchFields, select, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Suggests documents in the index that match the given partial query text. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// + /// The search text to use to suggest documents. Must be at least 1 character, and + /// no more than 100 characters. + /// + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// An OData expression that filters the documents considered for suggestions. + /// + /// A value indicating whether to use fuzzy matching for the suggestions query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// suggestions queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a suggestions query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of OData $orderby expressions by which to sort the results. Each + /// expression can be either a field name or a call to either the geo.distance() or + /// the search.score() functions. Each expression can be followed by asc to + /// indicate ascending, or desc to indicate descending. The default is ascending + /// order. Ties will be broken by the match scores of documents. If no $orderby is + /// specified, the default sort order is descending by document match score. There + /// can be at most 32 $orderby clauses. + /// + /// + /// The list of field names to search for the specified search text. Target fields + /// must be included in the specified suggester. + /// + /// + /// The list of fields to retrieve. If unspecified, only the key field will be + /// included in the results. + /// + /// + /// The number of suggestions to retrieve. The value must be a number between 1 and + /// 100. The default is 5. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. 
+ /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response SuggestGet(string indexName, string searchText, string suggesterName, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable orderBy, IEnumerable searchFields, IEnumerable select, int? top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SuggestGet"); + scope.Start(); + try + { + using HttpMessage message = CreateSuggestGetRequest(indexName, searchText, suggesterName, filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, orderBy, searchFields, select, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Suggests documents in the index that match the given partial query text. + /// The name of the index. + /// The Suggest request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> SuggestPostAsync(string indexName, SuggestOptions suggestOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(suggestOptions, nameof(suggestOptions)); + + using RequestContent content = suggestOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await SuggestPostAsync(indexName, content, context).ConfigureAwait(false); + return Response.FromValue(SuggestDocumentsResult.FromResponse(response), response); + } + + /// Suggests documents in the index that match the given partial query text. + /// The name of the index. + /// The Suggest request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response SuggestPost(string indexName, SuggestOptions suggestOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(suggestOptions, nameof(suggestOptions)); + + using RequestContent content = suggestOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = SuggestPost(indexName, content, context); + return Response.FromValue(SuggestDocumentsResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Suggests documents in the index that match the given partial query text. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task SuggestPostAsync(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SuggestPost"); + scope.Start(); + try + { + using HttpMessage message = CreateSuggestPostRequest(indexName, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Suggests documents in the index that match the given partial query text. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response SuggestPost(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.SuggestPost"); + scope.Start(); + try + { + using HttpMessage message = CreateSuggestPostRequest(indexName, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Sends a batch of document write actions to the index. + /// The name of the index. + /// The batch of index actions. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> IndexAsync(string indexName, IndexBatch batch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(batch, nameof(batch)); + + using RequestContent content = batch.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await IndexAsync(indexName, content, context).ConfigureAwait(false); + return Response.FromValue(IndexDocumentsResult.FromResponse(response), response); + } + + /// Sends a batch of document write actions to the index. + /// The name of the index. + /// The batch of index actions. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response Index(string indexName, IndexBatch batch, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(batch, nameof(batch)); + + using RequestContent content = batch.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Index(indexName, content, context); + return Response.FromValue(IndexDocumentsResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Sends a batch of document write actions to the index. 
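A usage sketch for the document write path above (illustration only; the batch body shape with "value" and "@search.action" is an assumption about the service's indexing contract, and the field names are placeholders). The request builder further below classifies both 200 and 207 as success for this operation, so per-action results should still be inspected.

// Protocol overload of Index: send a raw batch of actions and read the raw (possibly 207 Multi-Status) response.
using RequestContent batchBody = RequestContent.Create(
    "{ \"value\": [ { \"@search.action\": \"mergeOrUpload\", \"HotelId\": \"1\", \"Rating\": 4 } ] }");
Response indexResponse = await client.IndexAsync("hotels-index", batchBody, new RequestContext());

// Convenience overload: a generated IndexBatch model in, a typed IndexDocumentsResult out.
// (Construction of IndexBatch is not shown; it is assumed to be built from the generated models.)
// Response<IndexDocumentsResult> indexResult = await client.IndexAsync("hotels-index", batch);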
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task IndexAsync(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.Index"); + scope.Start(); + try + { + using HttpMessage message = CreateIndexRequest(indexName, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Sends a batch of document write actions to the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Index(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.Index"); + scope.Start(); + try + { + using HttpMessage message = CreateIndexRequest(indexName, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// The name of the index. + /// The incomplete term which should be auto-completed. + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// + /// Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' + /// to get shingles and 'oneTermWithContext' to use the current context while + /// producing auto-completed terms. + /// + /// + /// An OData expression that filters the documents used to produce completed terms + /// for the Autocomplete result. + /// + /// + /// A value indicating whether to use fuzzy matching for the autocomplete query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// autocomplete queries are slower and consume more resources. 
+ /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by an autocomplete query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of field names to consider when querying for auto-completed terms. + /// Target fields must be included in the specified suggester. + /// + /// + /// The number of auto-completed terms to retrieve. This must be a value between 1 + /// and 100. The default is 5. + /// + /// The cancellation token to use. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> AutocompleteGetAsync(string indexName, string searchText, string suggesterName, AutocompleteMode? autocompleteMode = null, string filter = null, bool? useFuzzyMatching = null, string highlightPostTag = null, string highlightPreTag = null, double? minimumCoverage = null, IEnumerable searchFields = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AutocompleteGetAsync(indexName, searchText, suggesterName, autocompleteMode?.ToString(), filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, searchFields, top, context).ConfigureAwait(false); + return Response.FromValue(AutocompleteResults.FromResponse(response), response); + } + + /// + /// Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// The name of the index. + /// The incomplete term which should be auto-completed. + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// + /// Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' + /// to get shingles and 'oneTermWithContext' to use the current context while + /// producing auto-completed terms. + /// + /// + /// An OData expression that filters the documents used to produce completed terms + /// for the Autocomplete result. + /// + /// + /// A value indicating whether to use fuzzy matching for the autocomplete query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// autocomplete queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting is disabled. 
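As a sketch of the Autocomplete convenience overload above (illustration only; the enum member name is inferred from the allowed value "twoTerms", and the index and suggester names are placeholders):

// Complete the partial term "pri" into full terms, producing two-term shingles.
Response<AutocompleteResults> completions = await client.AutocompleteGetAsync(
    "hotels-index",
    searchText: "pri",
    suggesterName: "sg",
    autocompleteMode: AutocompleteMode.TwoTerms, // member name assumed from the "twoTerms" allowed value
    top: 10);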
+ /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by an autocomplete query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of field names to consider when querying for auto-completed terms. + /// Target fields must be included in the specified suggester. + /// + /// + /// The number of auto-completed terms to retrieve. This must be a value between 1 + /// and 100. The default is 5. + /// + /// The cancellation token to use. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response AutocompleteGet(string indexName, string searchText, string suggesterName, AutocompleteMode? autocompleteMode = null, string filter = null, bool? useFuzzyMatching = null, string highlightPostTag = null, string highlightPreTag = null, double? minimumCoverage = null, IEnumerable searchFields = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AutocompleteGet(indexName, searchText, suggesterName, autocompleteMode?.ToString(), filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, searchFields, top, context); + return Response.FromValue(AutocompleteResults.FromResponse(response), response); + } + + /// + /// [Protocol Method] Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The incomplete term which should be auto-completed. + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// + /// Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' + /// to get shingles and 'oneTermWithContext' to use the current context while + /// producing auto-completed terms. Allowed values: "oneTerm" | "twoTerms" | "oneTermWithContext" + /// + /// + /// An OData expression that filters the documents used to produce completed terms + /// for the Autocomplete result. + /// + /// + /// A value indicating whether to use fuzzy matching for the autocomplete query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// autocomplete queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting is disabled. 
+ /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by an autocomplete query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of field names to consider when querying for auto-completed terms. + /// Target fields must be included in the specified suggester. + /// + /// + /// The number of auto-completed terms to retrieve. This must be a value between 1 + /// and 100. The default is 5. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task AutocompleteGetAsync(string indexName, string searchText, string suggesterName, string autocompleteMode, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable searchFields, int? top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.AutocompleteGet"); + scope.Start(); + try + { + using HttpMessage message = CreateAutocompleteGetRequest(indexName, searchText, suggesterName, autocompleteMode, filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, searchFields, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The incomplete term which should be auto-completed. + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// + /// Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' + /// to get shingles and 'oneTermWithContext' to use the current context while + /// producing auto-completed terms. Allowed values: "oneTerm" | "twoTerms" | "oneTermWithContext" + /// + /// + /// An OData expression that filters the documents used to produce completed terms + /// for the Autocomplete result. + /// + /// + /// A value indicating whether to use fuzzy matching for the autocomplete query. + /// Default is false. When set to true, the query will find terms even if there's a + /// substituted or missing character in the search text. While this provides a + /// better experience in some scenarios, it comes at a performance cost as fuzzy + /// autocomplete queries are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. 
Must be set with + /// highlightPostTag. If omitted, hit highlighting is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by an autocomplete query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The list of field names to consider when querying for auto-completed terms. + /// Target fields must be included in the specified suggester. + /// + /// + /// The number of auto-completed terms to retrieve. This must be a value between 1 + /// and 100. The default is 5. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response AutocompleteGet(string indexName, string searchText, string suggesterName, string autocompleteMode, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable searchFields, int? top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(searchText, nameof(searchText)); + Argument.AssertNotNull(suggesterName, nameof(suggesterName)); + + using var scope = ClientDiagnostics.CreateScope("Documents.AutocompleteGet"); + scope.Start(); + try + { + using HttpMessage message = CreateAutocompleteGetRequest(indexName, searchText, suggesterName, autocompleteMode, filter, useFuzzyMatching, highlightPostTag, highlightPreTag, minimumCoverage, searchFields, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// The name of the index. + /// The definition of the Autocomplete request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> AutocompletePostAsync(string indexName, AutocompleteOptions autocompleteOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(autocompleteOptions, nameof(autocompleteOptions)); + + using RequestContent content = autocompleteOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AutocompletePostAsync(indexName, content, context).ConfigureAwait(false); + return Response.FromValue(AutocompleteResults.FromResponse(response), response); + } + + /// + /// Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// The name of the index. + /// The definition of the Autocomplete request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual Response AutocompletePost(string indexName, AutocompleteOptions autocompleteOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(autocompleteOptions, nameof(autocompleteOptions)); + + using RequestContent content = autocompleteOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AutocompletePost(indexName, content, context); + return Response.FromValue(AutocompleteResults.FromResponse(response), response); + } + + /// + /// [Protocol Method] Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task AutocompletePostAsync(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.AutocompletePost"); + scope.Start(); + try + { + using HttpMessage message = CreateAutocompletePostRequest(indexName, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Autocompletes incomplete query terms based on input text and matching terms in + /// the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
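For the POST variant documented above, a hedged protocol-level sketch (illustration only; the body shape with "search", "suggesterName", and "autocompleteMode" is an assumption about the service's autocomplete contract):

// Protocol overload of AutocompletePost: raw JSON request body in, raw response out.
using RequestContent autocompleteBody = RequestContent.Create(
    "{ \"search\": \"pri\", \"suggesterName\": \"sg\", \"autocompleteMode\": \"twoTerms\" }");
Response autocompleteResponse = await client.AutocompletePostAsync("hotels-index", autocompleteBody);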
+ /// + public virtual Response AutocompletePost(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Documents.AutocompletePost"); + scope.Start(); + try + { + using HttpMessage message = CreateAutocompletePostRequest(indexName, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCountRequest(string indexName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs/$count", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateSearchGetRequest(string indexName, string searchText, bool? includeTotalResultCount, IEnumerable facets, string filter, IEnumerable highlightFields, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable orderBy, string queryType, IEnumerable scoringParameters, string scoringProfile, IEnumerable searchFields, string searchMode, string scoringStatistics, string sessionId, IEnumerable select, int? skip, int? top, string semanticConfiguration, string semanticErrorHandling, int? semanticMaxWaitInMilliseconds, string answers, string captions, string semanticQuery, string queryRewrites, string debug, string queryLanguage, string speller, IEnumerable semanticFields, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (searchText != null) + { + uri.AppendQuery("search", searchText, true); + } + if (includeTotalResultCount != null) + { + uri.AppendQuery("$count", includeTotalResultCount.Value, true); + } + if (facets != null && !(facets is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + foreach (var param in facets) + { + uri.AppendQuery("facet", param, true); + } + } + if (filter != null) + { + uri.AppendQuery("$filter", filter, true); + } + if (highlightFields != null && !(highlightFields is ChangeTrackingList changeTrackingList0 && changeTrackingList0.IsUndefined)) + { + uri.AppendQueryDelimited("highlight", highlightFields, ",", true); + } + if (highlightPostTag != null) + { + uri.AppendQuery("highlightPostTag", highlightPostTag, true); + } + if (highlightPreTag != null) + { + uri.AppendQuery("highlightPreTag", highlightPreTag, true); + } + if (minimumCoverage != null) + { + uri.AppendQuery("minimumCoverage", minimumCoverage.Value, true); + } + if (orderBy != null && !(orderBy is ChangeTrackingList changeTrackingList1 && changeTrackingList1.IsUndefined)) + { + uri.AppendQueryDelimited("$orderby", orderBy, ",", true); + } + if (queryType != null) + { + uri.AppendQuery("queryType", queryType, true); + } + if (scoringParameters != null && !(scoringParameters 
is ChangeTrackingList changeTrackingList2 && changeTrackingList2.IsUndefined)) + { + foreach (var param in scoringParameters) + { + uri.AppendQuery("scoringParameter", param, true); + } + } + if (scoringProfile != null) + { + uri.AppendQuery("scoringProfile", scoringProfile, true); + } + if (searchFields != null && !(searchFields is ChangeTrackingList changeTrackingList3 && changeTrackingList3.IsUndefined)) + { + uri.AppendQueryDelimited("searchFields", searchFields, ",", true); + } + if (searchMode != null) + { + uri.AppendQuery("searchMode", searchMode, true); + } + if (scoringStatistics != null) + { + uri.AppendQuery("scoringStatistics", scoringStatistics, true); + } + if (sessionId != null) + { + uri.AppendQuery("sessionId", sessionId, true); + } + if (select != null && !(select is ChangeTrackingList changeTrackingList4 && changeTrackingList4.IsUndefined)) + { + uri.AppendQueryDelimited("$select", select, ",", true); + } + if (skip != null) + { + uri.AppendQuery("$skip", skip.Value, true); + } + if (top != null) + { + uri.AppendQuery("$top", top.Value, true); + } + if (semanticConfiguration != null) + { + uri.AppendQuery("semanticConfiguration", semanticConfiguration, true); + } + if (semanticErrorHandling != null) + { + uri.AppendQuery("semanticErrorHandling", semanticErrorHandling, true); + } + if (semanticMaxWaitInMilliseconds != null) + { + uri.AppendQuery("semanticMaxWaitInMilliseconds", semanticMaxWaitInMilliseconds.Value, true); + } + if (answers != null) + { + uri.AppendQuery("answers", answers, true); + } + if (captions != null) + { + uri.AppendQuery("captions", captions, true); + } + if (semanticQuery != null) + { + uri.AppendQuery("semanticQuery", semanticQuery, true); + } + if (queryRewrites != null) + { + uri.AppendQuery("queryRewrites", queryRewrites, true); + } + if (debug != null) + { + uri.AppendQuery("debug", debug, true); + } + if (queryLanguage != null) + { + uri.AppendQuery("queryLanguage", queryLanguage, true); + } + if (speller != null) + { + uri.AppendQuery("speller", speller, true); + } + if (semanticFields != null && !(semanticFields is ChangeTrackingList changeTrackingList5 && changeTrackingList5.IsUndefined)) + { + uri.AppendQueryDelimited("semanticFields", semanticFields, ",", true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateSearchPostRequest(string indexName, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs/search.post.search", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetDocumentRequest(string indexName, string key, IEnumerable selectedFields, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs('", false); + uri.AppendPath(key, true); + 
uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (selectedFields != null && !(selectedFields is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("$select", selectedFields, ",", true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateSuggestGetRequest(string indexName, string searchText, string suggesterName, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable orderBy, IEnumerable searchFields, IEnumerable select, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs/search.suggest", false); + uri.AppendQuery("search", searchText, true); + uri.AppendQuery("suggesterName", suggesterName, true); + uri.AppendQuery("api-version", _apiVersion, true); + if (filter != null) + { + uri.AppendQuery("$filter", filter, true); + } + if (useFuzzyMatching != null) + { + uri.AppendQuery("fuzzy", useFuzzyMatching.Value, true); + } + if (highlightPostTag != null) + { + uri.AppendQuery("highlightPostTag", highlightPostTag, true); + } + if (highlightPreTag != null) + { + uri.AppendQuery("highlightPreTag", highlightPreTag, true); + } + if (minimumCoverage != null) + { + uri.AppendQuery("minimumCoverage", minimumCoverage.Value, true); + } + if (orderBy != null && !(orderBy is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("$orderby", orderBy, ",", true); + } + if (searchFields != null && !(searchFields is ChangeTrackingList changeTrackingList0 && changeTrackingList0.IsUndefined)) + { + uri.AppendQueryDelimited("searchFields", searchFields, ",", true); + } + if (select != null && !(select is ChangeTrackingList changeTrackingList1 && changeTrackingList1.IsUndefined)) + { + uri.AppendQueryDelimited("$select", select, ",", true); + } + if (top != null) + { + uri.AppendQuery("$top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateSuggestPostRequest(string indexName, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs/search.post.suggest", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateIndexRequest(string indexName, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200207); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs/search.index", false); + 
uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateAutocompleteGetRequest(string indexName, string searchText, string suggesterName, string autocompleteMode, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, IEnumerable searchFields, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs/search.autocomplete", false); + uri.AppendQuery("search", searchText, true); + uri.AppendQuery("suggesterName", suggesterName, true); + uri.AppendQuery("api-version", _apiVersion, true); + if (autocompleteMode != null) + { + uri.AppendQuery("autocompleteMode", autocompleteMode, true); + } + if (filter != null) + { + uri.AppendQuery("$filter", filter, true); + } + if (useFuzzyMatching != null) + { + uri.AppendQuery("fuzzy", useFuzzyMatching.Value, true); + } + if (highlightPostTag != null) + { + uri.AppendQuery("highlightPostTag", highlightPostTag, true); + } + if (highlightPreTag != null) + { + uri.AppendQuery("highlightPreTag", highlightPreTag, true); + } + if (minimumCoverage != null) + { + uri.AppendQuery("minimumCoverage", minimumCoverage.Value, true); + } + if (searchFields != null && !(searchFields is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("searchFields", searchFields, ",", true); + } + if (top != null) + { + uri.AppendQuery("$top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAutocompletePostRequest(string indexName, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/docs/search.post.autocomplete", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier200207; + private static ResponseClassifier ResponseClassifier200207 => _responseClassifier200207 ??= new StatusCodeClassifier(stackalloc ushort[] { 200, 207 }); + } +} diff --git 
a/sdk/search/Azure.Search.Documents/src/Generated/DocumentsRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/DocumentsRestClient.cs deleted file mode 100644 index 557cfebeee78..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/DocumentsRestClient.cs +++ /dev/null @@ -1,511 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents -{ - internal partial class DocumentsRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly string _indexName; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of DocumentsRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The name of the index. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , , or is null. - /// is an empty string, and was expected to be non-empty. - public DocumentsRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, string indexName, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _indexName = indexName ?? throw new ArgumentNullException(nameof(indexName)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateCountRequest() - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendRaw("/indexes('", false); - uri.AppendRaw(_indexName, true); - uri.AppendRaw("')", false); - uri.AppendPath("/docs/$count", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=none"); - return message; - } - - /// Queries the number of documents in the index. - /// The cancellation token to use. - public async Task> CountAsync(CancellationToken cancellationToken = default) - { - using var message = CreateCountRequest(); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - long value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = document.RootElement.GetInt64(); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Queries the number of documents in the index. 
- /// The cancellation token to use. - public Response Count(CancellationToken cancellationToken = default) - { - using var message = CreateCountRequest(); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - long value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = document.RootElement.GetInt64(); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateSearchPostRequest(SearchOptions searchOptions) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendRaw("/indexes('", false); - uri.AppendRaw(_indexName, true); - uri.AppendRaw("')", false); - uri.AppendPath("/docs/search.post.search", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=none"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(searchOptions); - request.Content = content; - return message; - } - - /// Searches for documents in the index. - /// The definition of the Search request. - /// The cancellation token to use. - /// is null. - public async Task> SearchPostAsync(SearchOptions searchOptions, CancellationToken cancellationToken = default) - { - if (searchOptions == null) - { - throw new ArgumentNullException(nameof(searchOptions)); - } - - using var message = CreateSearchPostRequest(searchOptions); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchDocumentsResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchDocumentsResult.DeserializeSearchDocumentsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Searches for documents in the index. - /// The definition of the Search request. - /// The cancellation token to use. - /// is null. 
- public Response SearchPost(SearchOptions searchOptions, CancellationToken cancellationToken = default) - { - if (searchOptions == null) - { - throw new ArgumentNullException(nameof(searchOptions)); - } - - using var message = CreateSearchPostRequest(searchOptions); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchDocumentsResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchDocumentsResult.DeserializeSearchDocumentsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetRequest(string key, IEnumerable selectedFields) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendRaw("/indexes('", false); - uri.AppendRaw(_indexName, true); - uri.AppendRaw("')", false); - uri.AppendPath("/docs('", false); - uri.AppendPath(key, true); - uri.AppendPath("')", false); - if (selectedFields != null && !(selectedFields is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) - { - uri.AppendQueryDelimited("$select", selectedFields, ",", true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=none"); - return message; - } - - /// Retrieves a document from the index. - /// The key of the document to retrieve. - /// List of field names to retrieve for the document; Any field not retrieved will be missing from the returned document. - /// The cancellation token to use. - /// is null. - public async Task>> GetAsync(string key, IEnumerable selectedFields = null, CancellationToken cancellationToken = default) - { - if (key == null) - { - throw new ArgumentNullException(nameof(key)); - } - - using var message = CreateGetRequest(key, selectedFields); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - IReadOnlyDictionary value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - Dictionary dictionary = new Dictionary(); - foreach (var property in document.RootElement.EnumerateObject()) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(property.Name, null); - } - else - { - dictionary.Add(property.Name, property.Value.GetObject()); - } - } - value = dictionary; - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Retrieves a document from the index. - /// The key of the document to retrieve. - /// List of field names to retrieve for the document; Any field not retrieved will be missing from the returned document. - /// The cancellation token to use. - /// is null. 
- public Response> Get(string key, IEnumerable selectedFields = null, CancellationToken cancellationToken = default) - { - if (key == null) - { - throw new ArgumentNullException(nameof(key)); - } - - using var message = CreateGetRequest(key, selectedFields); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - IReadOnlyDictionary value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - Dictionary dictionary = new Dictionary(); - foreach (var property in document.RootElement.EnumerateObject()) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(property.Name, null); - } - else - { - dictionary.Add(property.Name, property.Value.GetObject()); - } - } - value = dictionary; - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateSuggestPostRequest(SuggestOptions suggestOptions) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendRaw("/indexes('", false); - uri.AppendRaw(_indexName, true); - uri.AppendRaw("')", false); - uri.AppendPath("/docs/search.post.suggest", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=none"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(suggestOptions); - request.Content = content; - return message; - } - - /// Suggests documents in the index that match the given partial query text. - /// The Suggest request. - /// The cancellation token to use. - /// is null. - public async Task> SuggestPostAsync(SuggestOptions suggestOptions, CancellationToken cancellationToken = default) - { - if (suggestOptions == null) - { - throw new ArgumentNullException(nameof(suggestOptions)); - } - - using var message = CreateSuggestPostRequest(suggestOptions); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SuggestDocumentsResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SuggestDocumentsResult.DeserializeSuggestDocumentsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Suggests documents in the index that match the given partial query text. - /// The Suggest request. - /// The cancellation token to use. - /// is null. 
- public Response SuggestPost(SuggestOptions suggestOptions, CancellationToken cancellationToken = default) - { - if (suggestOptions == null) - { - throw new ArgumentNullException(nameof(suggestOptions)); - } - - using var message = CreateSuggestPostRequest(suggestOptions); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SuggestDocumentsResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SuggestDocumentsResult.DeserializeSuggestDocumentsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateIndexRequest(IndexBatch batch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendRaw("/indexes('", false); - uri.AppendRaw(_indexName, true); - uri.AppendRaw("')", false); - uri.AppendPath("/docs/search.index", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=none"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(batch); - request.Content = content; - return message; - } - - /// Sends a batch of document write actions to the index. - /// The batch of index actions. - /// The cancellation token to use. - /// is null. - public async Task> IndexAsync(IndexBatch batch, CancellationToken cancellationToken = default) - { - if (batch == null) - { - throw new ArgumentNullException(nameof(batch)); - } - - using var message = CreateIndexRequest(batch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - case 207: - { - IndexDocumentsResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = IndexDocumentsResult.DeserializeIndexDocumentsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Sends a batch of document write actions to the index. - /// The batch of index actions. - /// The cancellation token to use. - /// is null. 
- public Response Index(IndexBatch batch, CancellationToken cancellationToken = default) - { - if (batch == null) - { - throw new ArgumentNullException(nameof(batch)); - } - - using var message = CreateIndexRequest(batch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - case 207: - { - IndexDocumentsResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = IndexDocumentsResult.DeserializeIndexDocumentsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateAutocompletePostRequest(AutocompleteOptions autocompleteOptions) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendRaw("/indexes('", false); - uri.AppendRaw(_indexName, true); - uri.AppendRaw("')", false); - uri.AppendPath("/docs/search.post.autocomplete", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=none"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(autocompleteOptions); - request.Content = content; - return message; - } - - /// Autocompletes incomplete query terms based on input text and matching terms in the index. - /// The definition of the Autocomplete request. - /// The cancellation token to use. - /// is null. - public async Task> AutocompletePostAsync(AutocompleteOptions autocompleteOptions, CancellationToken cancellationToken = default) - { - if (autocompleteOptions == null) - { - throw new ArgumentNullException(nameof(autocompleteOptions)); - } - - using var message = CreateAutocompletePostRequest(autocompleteOptions); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - AutocompleteResults value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = AutocompleteResults.DeserializeAutocompleteResults(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Autocompletes incomplete query terms based on input text and matching terms in the index. - /// The definition of the Autocomplete request. - /// The cancellation token to use. - /// is null. 
- public Response AutocompletePost(AutocompleteOptions autocompleteOptions, CancellationToken cancellationToken = default) - { - if (autocompleteOptions == null) - { - throw new ArgumentNullException(nameof(autocompleteOptions)); - } - - using var message = CreateAutocompletePostRequest(autocompleteOptions); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - AutocompleteResults value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = AutocompleteResults.DeserializeAutocompleteResults(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilter.Serialization.cs new file mode 100644 index 000000000000..d00bbf5f6b19 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilter.Serialization.cs @@ -0,0 +1,181 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class EdgeNGramTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EdgeNGramTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MinGram)) + { + writer.WritePropertyName("minGram"u8); + writer.WriteNumberValue(MinGram.Value); + } + if (Optional.IsDefined(MaxGram)) + { + writer.WritePropertyName("maxGram"u8); + writer.WriteNumberValue(MaxGram.Value); + } + if (Optional.IsDefined(Side)) + { + writer.WritePropertyName("side"u8); + writer.WriteStringValue(Side.Value.ToString()); + } + } + + Search.Documents.Indexes.Models.EdgeNGramTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EdgeNGramTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.EdgeNGramTokenFilter.DeserializeEdgeNGramTokenFilter(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.EdgeNGramTokenFilter DeserializeEdgeNGramTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? minGram = default; + int? maxGram = default; + EdgeNGramTokenFilterSide? side = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("minGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("side"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + side = new EdgeNGramTokenFilterSide(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.EdgeNGramTokenFilter( + odataType, + name, + serializedAdditionalRawData, + minGram, + maxGram, + side); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EdgeNGramTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.EdgeNGramTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.EdgeNGramTokenFilter.DeserializeEdgeNGramTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EdgeNGramTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static new Search.Documents.Indexes.Models.EdgeNGramTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.EdgeNGramTokenFilter.DeserializeEdgeNGramTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilter.cs new file mode 100644 index 000000000000..7d1f9bf134bd --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilter.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Generates n-grams of the given size(s) starting from the front or the back of + /// an input token. This token filter is implemented using Apache Lucene. + /// + public partial class EdgeNGramTokenFilter : Search.Documents.TokenFilter + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the + /// value of maxGram. + /// + /// The maximum n-gram length. Default is 2. Maximum is 300. + /// + /// Specifies which side of the input the n-gram should be generated from. Default + /// is "front". + /// + internal EdgeNGramTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, int? minGram, int? maxGram, EdgeNGramTokenFilterSide? side) : base(odataType, name, serializedAdditionalRawData) + { + MinGram = minGram; + MaxGram = maxGram; + Side = side; + } + + /// Initializes a new instance of for deserialization. + internal EdgeNGramTokenFilter() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilterSide.cs b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilterSide.cs new file mode 100644 index 000000000000..5c8140fc2f51 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenFilterSide.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Specifies which side of the input an n-gram should be generated from. + public readonly partial struct EdgeNGramTokenFilterSide : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public EdgeNGramTokenFilterSide(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string FrontValue = "front"; + private const string BackValue = "back"; + + /// Specifies that the n-gram should be generated from the front of the input. 
+ public static EdgeNGramTokenFilterSide Front { get; } = new EdgeNGramTokenFilterSide(FrontValue); + /// Specifies that the n-gram should be generated from the back of the input. + public static EdgeNGramTokenFilterSide Back { get; } = new EdgeNGramTokenFilterSide(BackValue); + /// Determines if two values are the same. + public static bool operator ==(EdgeNGramTokenFilterSide left, EdgeNGramTokenFilterSide right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(EdgeNGramTokenFilterSide left, EdgeNGramTokenFilterSide right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator EdgeNGramTokenFilterSide(string value) => new EdgeNGramTokenFilterSide(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is EdgeNGramTokenFilterSide other && Equals(other); + /// + public bool Equals(EdgeNGramTokenFilterSide other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenizer.Serialization.cs new file mode 100644 index 000000000000..3ea493f8e901 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenizer.Serialization.cs @@ -0,0 +1,193 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class EdgeNGramTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(EdgeNGramTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MinGram)) + { + writer.WritePropertyName("minGram"u8); + writer.WriteNumberValue(MinGram.Value); + } + if (Optional.IsDefined(MaxGram)) + { + writer.WritePropertyName("maxGram"u8); + writer.WriteNumberValue(MaxGram.Value); + } + if (Optional.IsCollectionDefined(TokenChars)) + { + writer.WritePropertyName("tokenChars"u8); + writer.WriteStartArray(); + foreach (var item in TokenChars) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + } + + EdgeNGramTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(EdgeNGramTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeEdgeNGramTokenizer(document.RootElement, options); + } + + internal static EdgeNGramTokenizer DeserializeEdgeNGramTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? minGram = default; + int? maxGram = default; + IList tokenChars = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("minGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("tokenChars"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new TokenCharacterKind(item.GetString())); + } + tokenChars = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new EdgeNGramTokenizer( + odataType, + name, + serializedAdditionalRawData, + minGram, + maxGram, + tokenChars ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(EdgeNGramTokenizer)} does not support writing '{options.Format}' format."); + } + } + + EdgeNGramTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeEdgeNGramTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(EdgeNGramTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static new EdgeNGramTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeEdgeNGramTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenizer.cs new file mode 100644 index 000000000000..d217849d2f5d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EdgeNGramTokenizer.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Tokenizes the input from an edge into n-grams of the given size(s). This + /// tokenizer is implemented using Apache Lucene. + /// + public partial class EdgeNGramTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public EdgeNGramTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.EdgeNGramTokenizer"; + TokenChars = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the + /// value of maxGram. + /// + /// The maximum n-gram length. Default is 2. Maximum is 300. + /// Character classes to keep in the tokens. + internal EdgeNGramTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? minGram, int? maxGram, IList tokenChars) : base(odataType, name, serializedAdditionalRawData) + { + MinGram = minGram; + MaxGram = maxGram; + TokenChars = tokenChars; + } + + /// Initializes a new instance of for deserialization. + internal EdgeNGramTokenizer() + { + } + + /// + /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the + /// value of maxGram. + /// + public int? MinGram { get; set; } + /// The maximum n-gram length. Default is 2. Maximum is 300. + public int? MaxGram { get; set; } + /// Character classes to keep in the tokens. + public IList TokenChars { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ElisionTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ElisionTokenFilter.Serialization.cs new file mode 100644 index 000000000000..7db33a509273 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ElisionTokenFilter.Serialization.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ElisionTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ElisionTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsCollectionDefined(Articles)) + { + writer.WritePropertyName("articles"u8); + writer.WriteStartArray(); + foreach (var item in Articles) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + } + + ElisionTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ElisionTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeElisionTokenFilter(document.RootElement, options); + } + + internal static ElisionTokenFilter DeserializeElisionTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList articles = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("articles"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + articles = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ElisionTokenFilter(odataType, name, serializedAdditionalRawData, articles ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ElisionTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + ElisionTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeElisionTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ElisionTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ElisionTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeElisionTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ElisionTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/ElisionTokenFilter.cs new file mode 100644 index 000000000000..182925b60d74 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ElisionTokenFilter.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Removes elisions. For example, "l'avion" (the plane) will be converted to + /// "avion" (plane). This token filter is implemented using Apache Lucene. + /// + public partial class ElisionTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public ElisionTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.ElisionTokenFilter"; + Articles = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The set of articles to remove. + internal ElisionTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList articles) : base(odataType, name, serializedAdditionalRawData) + { + Articles = articles; + } + + /// Initializes a new instance of for deserialization. + internal ElisionTokenFilter() + { + } + + /// The set of articles to remove. 
+ public IList Articles { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityCategory.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityCategory.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/EntityCategory.cs rename to sdk/search/Azure.Search.Documents/src/Generated/EntityCategory.cs index 85b5da32aa89..d22f635f0f14 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityCategory.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityCategory.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// A string indicating what entity categories to return. public readonly partial struct EntityCategory : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EntityLinkingSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityLinkingSkill.Serialization.cs new file mode 100644 index 000000000000..abcfb7796bfd --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityLinkingSkill.Serialization.cs @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class EntityLinkingSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(EntityLinkingSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode); + } + if (Optional.IsDefined(MinimumPrecision)) + { + writer.WritePropertyName("minimumPrecision"u8); + writer.WriteNumberValue(MinimumPrecision.Value); + } + if (Optional.IsDefined(ModelVersion)) + { + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + } + } + + EntityLinkingSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(EntityLinkingSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeEntityLinkingSkill(document.RootElement, options); + } + + internal static EntityLinkingSkill DeserializeEntityLinkingSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string defaultLanguageCode = default; + double? minimumPrecision = default; + string modelVersion = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + defaultLanguageCode = property.Value.GetString(); + continue; + } + if (property.NameEquals("minimumPrecision"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minimumPrecision = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new EntityLinkingSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + minimumPrecision, + modelVersion); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(EntityLinkingSkill)} does not support writing '{options.Format}' format."); + } + } + + EntityLinkingSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeEntityLinkingSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(EntityLinkingSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new EntityLinkingSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeEntityLinkingSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EntityLinkingSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityLinkingSkill.cs new file mode 100644 index 000000000000..1d6212269cc6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityLinkingSkill.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Using the Text Analytics API, extracts linked entities from text. + public partial class EntityLinkingSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public EntityLinkingSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.V3.EntityLinkingSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. Default is `en`. 
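// Illustrative sketch (not part of the generated code): constructing the EntityLinkingSkill defined
// above via its inputs/outputs constructor. The using directive, InputFieldMappingEntry /
// OutputFieldMappingEntry property usage, and the "en" language code assume the public
// Azure.Search.Documents.Indexes.Models surface and are assumptions, not taken from this diff.
using Azure.Search.Documents.Indexes.Models;

var linkingSkill = new EntityLinkingSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("entities") { TargetName = "linkedEntities" } })
{
    DefaultLanguageCode = "en",
    MinimumPrecision = 0.5,  // keep only entities with confidence above 0.5
    ModelVersion = null      // leave unset so the service uses the latest model version
};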
+ /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + internal EntityLinkingSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string defaultLanguageCode, double? minimumPrecision, string modelVersion) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + MinimumPrecision = minimumPrecision; + ModelVersion = modelVersion; + } + + /// Initializes a new instance of for deserialization. + internal EntityLinkingSkill() + { + } + + /// A value indicating which language code to use. Default is `en`. + public string DefaultLanguageCode { get; set; } + /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + public double? MinimumPrecision { get; set; } + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + public string ModelVersion { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkill.Serialization.cs new file mode 100644 index 000000000000..ac201e302c25 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkill.Serialization.cs @@ -0,0 +1,247 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class EntityRecognitionSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(EntityRecognitionSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsCollectionDefined(Categories)) + { + writer.WritePropertyName("categories"u8); + writer.WriteStartArray(); + foreach (var item in Categories) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); + } + if (Optional.IsDefined(IncludeTypelessEntities)) + { + writer.WritePropertyName("includeTypelessEntities"u8); + writer.WriteBooleanValue(IncludeTypelessEntities.Value); + } + if (Optional.IsDefined(MinimumPrecision)) + { + writer.WritePropertyName("minimumPrecision"u8); + writer.WriteNumberValue(MinimumPrecision.Value); + } + } + + EntityRecognitionSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(EntityRecognitionSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeEntityRecognitionSkill(document.RootElement, options); + } + + internal static EntityRecognitionSkill DeserializeEntityRecognitionSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList categories = default; + EntityRecognitionSkillLanguage? defaultLanguageCode = default; + bool? includeTypelessEntities = default; + double? 
minimumPrecision = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("categories"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new EntityCategory(item.GetString())); + } + categories = array; + continue; + } + if (property.NameEquals("defaultLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultLanguageCode = new EntityRecognitionSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("includeTypelessEntities"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + includeTypelessEntities = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("minimumPrecision"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minimumPrecision = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new EntityRecognitionSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + categories ?? new ChangeTrackingList(), + defaultLanguageCode, + includeTypelessEntities, + minimumPrecision); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(EntityRecognitionSkill)} does not support writing '{options.Format}' format."); + } + } + + EntityRecognitionSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeEntityRecognitionSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(EntityRecognitionSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new EntityRecognitionSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeEntityRecognitionSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkill.cs new file mode 100644 index 000000000000..a6f9c942f1a5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkill.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// This skill is deprecated. Use the V3.EntityRecognitionSkill instead. + public partial class EntityRecognitionSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public EntityRecognitionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.EntityRecognitionSkill"; + Categories = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A list of entity categories that should be extracted. + /// A value indicating which language code to use. 
Default is `en`. + /// + /// Determines whether or not to include entities which are well known but don't + /// conform to a pre-defined type. If this configuration is not set (default), set + /// to null or set to false, entities which don't conform to one of the pre-defined + /// types will not be surfaced. + /// + /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + internal EntityRecognitionSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, IList categories, EntityRecognitionSkillLanguage? defaultLanguageCode, bool? includeTypelessEntities, double? minimumPrecision) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + Categories = categories; + DefaultLanguageCode = defaultLanguageCode; + IncludeTypelessEntities = includeTypelessEntities; + MinimumPrecision = minimumPrecision; + } + + /// Initializes a new instance of for deserialization. + internal EntityRecognitionSkill() + { + } + + /// A list of entity categories that should be extracted. + public IList Categories { get; } + /// A value indicating which language code to use. Default is `en`. + public EntityRecognitionSkillLanguage? DefaultLanguageCode { get; set; } + /// + /// Determines whether or not to include entities which are well known but don't + /// conform to a pre-defined type. If this configuration is not set (default), set + /// to null or set to false, entities which don't conform to one of the pre-defined + /// types will not be surfaced. + /// + public bool? IncludeTypelessEntities { get; set; } + /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + public double? MinimumPrecision { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillLanguage.cs similarity index 97% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillLanguage.cs index 308aae66fdfc..e038bb07870f 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillLanguage.cs @@ -8,9 +8,12 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { - /// Deprecated. The language codes supported for input text by EntityRecognitionSkill. + /// + /// Deprecated. The language codes supported for input text by + /// EntityRecognitionSkill. 
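// Illustrative sketch (not part of the generated code): the deprecated V1 EntityRecognitionSkill
// defined above, limited to a single category. EntityCategory.Organization and
// EntityRecognitionSkillLanguage.En come from the public Azure.Search.Documents.Indexes.Models
// surface and are assumptions here; only the constructor and properties appear in this diff.
using Azure.Search.Documents.Indexes.Models;

var entitySkill = new EntityRecognitionSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("organizations") { TargetName = "orgs" } })
{
    DefaultLanguageCode = EntityRecognitionSkillLanguage.En,
    MinimumPrecision = 0.7
};
entitySkill.Categories.Add(EntityCategory.Organization); // Categories is get-only; add to the list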
+ /// public readonly partial struct EntityRecognitionSkillLanguage : IEquatable { private readonly string _value; diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillV3.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillV3.Serialization.cs new file mode 100644 index 000000000000..47d6b8e04ef5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillV3.Serialization.cs @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + internal partial class EntityRecognitionSkillV3 : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EntityRecognitionSkillV3)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsCollectionDefined(Categories)) + { + writer.WritePropertyName("categories"u8); + writer.WriteStartArray(); + foreach (var item in Categories) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode); + } + if (Optional.IsDefined(MinimumPrecision)) + { + writer.WritePropertyName("minimumPrecision"u8); + writer.WriteNumberValue(MinimumPrecision.Value); + } + if (Optional.IsDefined(ModelVersion)) + { + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + } + } + + Search.Documents.Indexes.Models.EntityRecognitionSkillV3 IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EntityRecognitionSkillV3)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.EntityRecognitionSkillV3.DeserializeEntityRecognitionSkillV3(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.EntityRecognitionSkillV3 DeserializeEntityRecognitionSkillV3(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList categories = default; + string defaultLanguageCode = default; + double? 
minimumPrecision = default; + string modelVersion = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("categories"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + categories = array; + continue; + } + if (property.NameEquals("defaultLanguageCode"u8)) + { + defaultLanguageCode = property.Value.GetString(); + continue; + } + if (property.NameEquals("minimumPrecision"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minimumPrecision = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.EntityRecognitionSkillV3( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + categories ?? new ChangeTrackingList(), + defaultLanguageCode, + minimumPrecision, + modelVersion); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EntityRecognitionSkillV3)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.EntityRecognitionSkillV3 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.EntityRecognitionSkillV3.DeserializeEntityRecognitionSkillV3(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.EntityRecognitionSkillV3)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.EntityRecognitionSkillV3 FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.EntityRecognitionSkillV3.DeserializeEntityRecognitionSkillV3(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillV3.cs b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillV3.cs new file mode 100644 index 000000000000..17238c861946 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/EntityRecognitionSkillV3.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Using the Text Analytics API, extracts entities of different types from text. + internal partial class EntityRecognitionSkillV3 : Search.Documents.SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public EntityRecognitionSkillV3(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.V3.EntityRecognitionSkill"; + Categories = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. 
+ /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A list of entity categories that should be extracted. + /// A value indicating which language code to use. Default is `en`. + /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + /// + /// The version of the model to use when calling the Text Analytics API. It will + /// default to the latest available when not specified. We recommend you do not + /// specify this value unless absolutely necessary. + /// + internal EntityRecognitionSkillV3(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, IList categories, string defaultLanguageCode, double? minimumPrecision, string modelVersion) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + Categories = categories; + DefaultLanguageCode = defaultLanguageCode; + MinimumPrecision = minimumPrecision; + ModelVersion = modelVersion; + } + + /// Initializes a new instance of for deserialization. + internal EntityRecognitionSkillV3() + { + } + + /// A list of entity categories that should be extracted. + public IList Categories { get; } + /// A value indicating which language code to use. Default is `en`. + public string DefaultLanguageCode { get; set; } + /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + public double? MinimumPrecision { get; set; } + /// + /// The version of the model to use when calling the Text Analytics API. It will + /// default to the latest available when not specified. We recommend you do not + /// specify this value unless absolutely necessary. + /// + public string ModelVersion { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnAlgorithmConfiguration.Serialization.cs new file mode 100644 index 000000000000..b72d8d1b893a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnAlgorithmConfiguration.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ExhaustiveKnnAlgorithmConfiguration : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
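// Illustrative sketch (not part of the generated code): placing one of the entity skills defined
// above into a skillset. SearchIndexerClient, SearchIndexerSkillset, and CreateSkillsetAsync are
// part of the public SDK surface and are assumptions here (only the skill models appear in this
// diff); the endpoint, key, and names are placeholders. Note that EntityLinkingSkill and the
// internal EntityRecognitionSkillV3 both use "#Microsoft.Skills.Text.V3.*" discriminators.
using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexerClient = new SearchIndexerClient(
    new Uri("https://my-service.search.windows.net"),  // placeholder endpoint
    new AzureKeyCredential("<admin-key>"));            // placeholder key

var skillset = new SearchIndexerSkillset("entity-skillset", new SearchIndexerSkill[]
{
    new EntityLinkingSkill(
        new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
        new[] { new OutputFieldMappingEntry("entities") { TargetName = "linkedEntities" } })
});

await indexerClient.CreateSkillsetAsync(skillset);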
+ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ExhaustiveKnnAlgorithmConfiguration)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Parameters)) + { + writer.WritePropertyName("exhaustiveKnnParameters"u8); + writer.WriteObjectValue(Parameters, options); + } + } + + ExhaustiveKnnAlgorithmConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ExhaustiveKnnAlgorithmConfiguration)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeExhaustiveKnnAlgorithmConfiguration(document.RootElement, options); + } + + internal static ExhaustiveKnnAlgorithmConfiguration DeserializeExhaustiveKnnAlgorithmConfiguration(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ExhaustiveKnnParameters exhaustiveKnnParameters = default; + string name = default; + VectorSearchAlgorithmKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("exhaustiveKnnParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exhaustiveKnnParameters = ExhaustiveKnnParameters.DeserializeExhaustiveKnnParameters(property.Value, options); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchAlgorithmKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ExhaustiveKnnAlgorithmConfiguration(name, kind, serializedAdditionalRawData, exhaustiveKnnParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ExhaustiveKnnAlgorithmConfiguration)} does not support writing '{options.Format}' format."); + } + } + + ExhaustiveKnnAlgorithmConfiguration IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeExhaustiveKnnAlgorithmConfiguration(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ExhaustiveKnnAlgorithmConfiguration)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ExhaustiveKnnAlgorithmConfiguration FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeExhaustiveKnnAlgorithmConfiguration(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnAlgorithmConfiguration.cs new file mode 100644 index 000000000000..33de8cb1adb6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnAlgorithmConfiguration.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains configuration options specific to the exhaustive KNN algorithm used + /// during querying, which will perform brute-force search across the entire vector + /// index. + /// + public partial class ExhaustiveKnnAlgorithmConfiguration : VectorSearchAlgorithmConfiguration + { + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// is null. + public ExhaustiveKnnAlgorithmConfiguration(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + Kind = VectorSearchAlgorithmKind.ExhaustiveKnn; + } + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// Type of VectorSearchAlgorithmConfiguration. + /// Keeps track of any properties unknown to the library. + /// Contains the parameters specific to exhaustive KNN algorithm. + internal ExhaustiveKnnAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind, IDictionary serializedAdditionalRawData, ExhaustiveKnnParameters parameters) : base(name, kind, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal ExhaustiveKnnAlgorithmConfiguration() + { + } + + /// Contains the parameters specific to exhaustive KNN algorithm. 
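// Illustrative sketch (not part of the generated code): wiring the exhaustive KNN configuration
// defined above into an index's vector search settings. VectorSearch, VectorSearchProfile, and
// VectorSearchAlgorithmMetric.Cosine come from the public Azure.Search.Documents.Indexes.Models
// surface and are assumptions here; only ExhaustiveKnnAlgorithmConfiguration and
// ExhaustiveKnnParameters appear in this diff.
using Azure.Search.Documents.Indexes.Models;

var vectorSearch = new VectorSearch();
vectorSearch.Algorithms.Add(new ExhaustiveKnnAlgorithmConfiguration("exhaustive-knn")
{
    Parameters = new ExhaustiveKnnParameters { Metric = VectorSearchAlgorithmMetric.Cosine }
});
// A profile ties a vector field to the algorithm configuration by name.
vectorSearch.Profiles.Add(new VectorSearchProfile("exhaustive-profile", "exhaustive-knn"));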
+ public ExhaustiveKnnParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnParameters.Serialization.cs new file mode 100644 index 000000000000..5c284b8c9085 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnParameters.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ExhaustiveKnnParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ExhaustiveKnnParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Metric)) + { + writer.WritePropertyName("metric"u8); + writer.WriteStringValue(Metric.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ExhaustiveKnnParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ExhaustiveKnnParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeExhaustiveKnnParameters(document.RootElement, options); + } + + internal static ExhaustiveKnnParameters DeserializeExhaustiveKnnParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + VectorSearchAlgorithmMetric? 
metric = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("metric"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + metric = new VectorSearchAlgorithmMetric(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ExhaustiveKnnParameters(metric, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ExhaustiveKnnParameters)} does not support writing '{options.Format}' format."); + } + } + + ExhaustiveKnnParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeExhaustiveKnnParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ExhaustiveKnnParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ExhaustiveKnnParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeExhaustiveKnnParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnParameters.cs new file mode 100644 index 000000000000..b62bd973ac7c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ExhaustiveKnnParameters.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains the parameters specific to exhaustive KNN algorithm. + public partial class ExhaustiveKnnParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public ExhaustiveKnnParameters() + { + } + + /// Initializes a new instance of . + /// The similarity metric to use for vector comparisons. + /// Keeps track of any properties unknown to the library. + internal ExhaustiveKnnParameters(VectorSearchAlgorithmMetric? metric, IDictionary serializedAdditionalRawData) + { + Metric = metric; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The similarity metric to use for vector comparisons. + public VectorSearchAlgorithmMetric? Metric { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FacetResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/FacetResult.Serialization.cs new file mode 100644 index 000000000000..184216c83486 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FacetResult.Serialization.cs @@ -0,0 +1,206 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class FacetResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FacetResult)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Count)) + { + writer.WritePropertyName("count"u8); + writer.WriteNumberValue(Count.Value); + } + if (options.Format != "W" && Optional.IsCollectionDefined(Facets)) + { + writer.WritePropertyName("@search.facets"u8); + writer.WriteStartObject(); + foreach (var item in Facets) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStartArray(); + foreach (var item0 in item.Value) + { + writer.WriteObjectValue(item0, options); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + if (options.Format != "W" && Optional.IsDefined(Sum)) + { + writer.WritePropertyName("sum"u8); + writer.WriteNumberValue(Sum.Value); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + FacetResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FacetResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFacetResult(document.RootElement, options); + } + + internal static FacetResult DeserializeFacetResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + long? count = default; + IReadOnlyDictionary> searchFacets = default; + long? sum = default; + IReadOnlyDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("count"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + count = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("@search.facets"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary> dictionary = new Dictionary>(); + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property0.Name, null); + } + else + { + List array = new List(); + foreach (var item in property0.Value.EnumerateArray()) + { + array.Add(DeserializeFacetResult(item, options)); + } + dictionary.Add(property0.Name, array); + } + } + searchFacets = dictionary; + continue; + } + if (property.NameEquals("sum"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sum = property.Value.GetInt64(); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new FacetResult(count, searchFacets ?? 
new ChangeTrackingDictionary>(), sum, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FacetResult)} does not support writing '{options.Format}' format."); + } + } + + FacetResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFacetResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FacetResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FacetResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFacetResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FacetResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/FacetResult.cs new file mode 100644 index 000000000000..271ba678ccfd --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FacetResult.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A single bucket of a facet query result. Reports the number of documents with a + /// field value falling within a particular range or having a particular value or + /// interval. + /// + internal partial class FacetResult + { + /// Initializes a new instance of . + internal FacetResult() + { + Facets = new ChangeTrackingDictionary>(); + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// + /// The approximate count of documents falling within the bucket described by this + /// facet. + /// + /// + /// The nested facet query results for the search operation, organized as a + /// collection of buckets for each faceted field; null if the query did not contain + /// any nested facets. + /// + /// The resulting total sum for the facet when a sum metric is requested. + /// Additional Properties. + internal FacetResult(long? count, IReadOnlyDictionary> facets, long? sum, IReadOnlyDictionary additionalProperties) + { + Count = count; + Facets = facets; + Sum = sum; + AdditionalProperties = additionalProperties; + } + + /// + /// The approximate count of documents falling within the bucket described by this + /// facet. + /// + public long? 
Count { get; } + /// + /// The nested facet query results for the search operation, organized as a + /// collection of buckets for each faceted field; null if the query did not contain + /// any nested facets. + /// + public IReadOnlyDictionary> Facets { get; } + /// The resulting total sum for the facet when a sum metric is requested. + public long? Sum { get; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IReadOnlyDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FieldMapping.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/FieldMapping.Serialization.cs new file mode 100644 index 000000000000..86758807daef --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FieldMapping.Serialization.cs @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class FieldMapping : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FieldMapping)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("sourceFieldName"u8); + writer.WriteStringValue(SourceFieldName); + if (Optional.IsDefined(TargetFieldName)) + { + writer.WritePropertyName("targetFieldName"u8); + writer.WriteStringValue(TargetFieldName); + } + if (Optional.IsDefined(MappingFunction)) + { + writer.WritePropertyName("mappingFunction"u8); + writer.WriteObjectValue(MappingFunction, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + FieldMapping IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FieldMapping)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFieldMapping(document.RootElement, options); + } + + internal static FieldMapping DeserializeFieldMapping(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string sourceFieldName = default; + string targetFieldName = default; + FieldMappingFunction mappingFunction = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("sourceFieldName"u8)) + { + sourceFieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("targetFieldName"u8)) + { + targetFieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("mappingFunction"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + mappingFunction = FieldMappingFunction.DeserializeFieldMappingFunction(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FieldMapping(sourceFieldName, targetFieldName, mappingFunction, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FieldMapping)} does not support writing '{options.Format}' format."); + } + } + + FieldMapping IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFieldMapping(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FieldMapping)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FieldMapping FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFieldMapping(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FieldMapping.cs b/sdk/search/Azure.Search.Documents/src/Generated/FieldMapping.cs new file mode 100644 index 000000000000..c26a1b65bbf4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FieldMapping.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a mapping between a field in a data source and a target field in an + /// index. + /// + public partial class FieldMapping + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the field in the data source. + /// is null. + public FieldMapping(string sourceFieldName) + { + Argument.AssertNotNull(sourceFieldName, nameof(sourceFieldName)); + + SourceFieldName = sourceFieldName; + } + + /// Initializes a new instance of . + /// The name of the field in the data source. + /// + /// The name of the target field in the index. Same as the source field name by + /// default. + /// + /// A function to apply to each source field value before indexing. + /// Keeps track of any properties unknown to the library. + internal FieldMapping(string sourceFieldName, string targetFieldName, FieldMappingFunction mappingFunction, IDictionary serializedAdditionalRawData) + { + SourceFieldName = sourceFieldName; + TargetFieldName = targetFieldName; + MappingFunction = mappingFunction; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FieldMapping() + { + } + + /// The name of the field in the data source. + public string SourceFieldName { get; set; } + /// + /// The name of the target field in the index. Same as the source field name by + /// default. + /// + public string TargetFieldName { get; set; } + /// A function to apply to each source field value before indexing. + public FieldMappingFunction MappingFunction { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FieldMappingFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/FieldMappingFunction.Serialization.cs new file mode 100644 index 000000000000..9eb27fd4866b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FieldMappingFunction.Serialization.cs @@ -0,0 +1,187 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
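As a quick orientation for reviewers, here is a minimal usage sketch of the field mapping surface introduced above. It relies only on the constructors and settable properties visible in this diff (FieldMapping(string sourceFieldName), TargetFieldName, MappingFunction, and the FieldMappingFunction type defined later in this diff); the field names are illustrative, and "base64Encode" is assumed to be one of the mapping function names the service documents rather than anything defined in this PR.

using System;
using Azure.Search.Documents;

internal static class FieldMappingSketch
{
    private static void Main()
    {
        // Map the data source column "db_title" onto the index field "title".
        var mapping = new FieldMapping("db_title")
        {
            TargetFieldName = "title",

            // "base64Encode" is assumed to be one of the service's documented
            // mapping function names; any arguments it needed would go into
            // FieldMappingFunction.Parameters (type defined later in this diff).
            MappingFunction = new FieldMappingFunction("base64Encode")
        };

        Console.WriteLine($"{mapping.SourceFieldName} -> {mapping.TargetFieldName}");
    }
}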
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class FieldMappingFunction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FieldMappingFunction)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsCollectionDefined(Parameters)) + { + writer.WritePropertyName("parameters"u8); + writer.WriteStartObject(); + foreach (var item in Parameters) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + FieldMappingFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FieldMappingFunction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFieldMappingFunction(document.RootElement, options); + } + + internal static FieldMappingFunction DeserializeFieldMappingFunction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + IDictionary parameters = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("parameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property0.Name, null); + } + else + { + dictionary.Add(property0.Name, BinaryData.FromString(property0.Value.GetRawText())); + } + } + parameters = dictionary; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FieldMappingFunction(name, parameters ?? new ChangeTrackingDictionary(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FieldMappingFunction)} does not support writing '{options.Format}' format."); + } + } + + FieldMappingFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFieldMappingFunction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FieldMappingFunction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FieldMappingFunction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFieldMappingFunction(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FieldMappingFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/FieldMappingFunction.cs new file mode 100644 index 000000000000..1400f6169d6b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FieldMappingFunction.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Represents a function that transforms a value from a data source before + /// indexing. + /// + public partial class FieldMappingFunction + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the field mapping function. + /// is null. + public FieldMappingFunction(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + Parameters = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The name of the field mapping function. + /// + /// A dictionary of parameter name/value pairs to pass to the function. Each value + /// must be of a primitive type. + /// + /// Keeps track of any properties unknown to the library. + internal FieldMappingFunction(string name, IDictionary parameters, IDictionary serializedAdditionalRawData) + { + Name = name; + Parameters = parameters; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FieldMappingFunction() + { + } + + /// The name of the field mapping function. + public string Name { get; set; } + /// + /// A dictionary of parameter name/value pairs to pass to the function. Each value + /// must be of a primitive type. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// + /// + public IDictionary Parameters { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringFunction.Serialization.cs new file mode 100644 index 000000000000..f8099533a5b2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringFunction.Serialization.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class FreshnessScoringFunction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FreshnessScoringFunction)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("freshness"u8); + writer.WriteObjectValue(Parameters, options); + } + + FreshnessScoringFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FreshnessScoringFunction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFreshnessScoringFunction(document.RootElement, options); + } + + internal static FreshnessScoringFunction DeserializeFreshnessScoringFunction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + FreshnessScoringParameters freshness = default; + string fieldName = default; + double boost = default; + ScoringFunctionInterpolation? 
interpolation = default; + string type = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("freshness"u8)) + { + freshness = FreshnessScoringParameters.DeserializeFreshnessScoringParameters(property.Value, options); + continue; + } + if (property.NameEquals("fieldName"u8)) + { + fieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("boost"u8)) + { + boost = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("interpolation"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + interpolation = new ScoringFunctionInterpolation(property.Value.GetString()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FreshnessScoringFunction( + fieldName, + boost, + interpolation, + type, + serializedAdditionalRawData, + freshness); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FreshnessScoringFunction)} does not support writing '{options.Format}' format."); + } + } + + FreshnessScoringFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFreshnessScoringFunction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FreshnessScoringFunction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new FreshnessScoringFunction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFreshnessScoringFunction(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringFunction.cs new file mode 100644 index 000000000000..e1721b5aad99 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringFunction.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
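Every model in this PR repeats the same IUtf8JsonSerializable / IJsonModel / IPersistableModel boilerplate: the "W" (wire) format resolves to the model's own format via GetFormatFromOptions, and only "J" (JSON) is supported for reading and writing. A rough round-trip sketch of that contract, using the FieldMapping type from earlier in this diff and only System.ClientModel.Primitives calls; treat it as an illustration under those assumptions, not as documented public usage.

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

internal static class ModelRoundTripSketch
{
    private static void Main()
    {
        var original = new FieldMapping("sourceColumn") { TargetFieldName = "targetField" };

        // ModelReaderWriter dispatches to the generated IPersistableModel.Write,
        // which only accepts the "J" (JSON) format.
        BinaryData payload = ModelReaderWriter.Write(original);

        // Read parses the JSON and calls the generated deserializer, which stashes
        // any unrecognized properties in _serializedAdditionalRawData.
        FieldMapping roundTripped = ModelReaderWriter.Read<FieldMapping>(payload);

        Console.WriteLine(roundTripped.SourceFieldName); // sourceColumn
        Console.WriteLine(roundTripped.TargetFieldName); // targetField
    }
}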
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Defines a function that boosts scores based on the value of a date-time field. + public partial class FreshnessScoringFunction : ScoringFunction + { + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// Parameter values for the freshness scoring function. + /// or is null. + public FreshnessScoringFunction(string fieldName, double boost, FreshnessScoringParameters parameters) : base(fieldName, boost) + { + Argument.AssertNotNull(fieldName, nameof(fieldName)); + Argument.AssertNotNull(parameters, nameof(parameters)); + + Type = "freshness"; + Parameters = parameters; + } + + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// + /// A value indicating how boosting will be interpolated across document scores; + /// defaults to "Linear". + /// + /// Type of ScoringFunction. + /// Keeps track of any properties unknown to the library. + /// Parameter values for the freshness scoring function. + internal FreshnessScoringFunction(string fieldName, double boost, ScoringFunctionInterpolation? interpolation, string type, IDictionary serializedAdditionalRawData, FreshnessScoringParameters parameters) : base(fieldName, boost, interpolation, type, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal FreshnessScoringFunction() + { + } + + /// Parameter values for the freshness scoring function. + public FreshnessScoringParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringParameters.Serialization.cs new file mode 100644 index 000000000000..5642060c9869 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringParameters.Serialization.cs @@ -0,0 +1,142 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class FreshnessScoringParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FreshnessScoringParameters)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("boostingDuration"u8); + writer.WriteStringValue(BoostingDuration, "P"); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + FreshnessScoringParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FreshnessScoringParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFreshnessScoringParameters(document.RootElement, options); + } + + internal static FreshnessScoringParameters DeserializeFreshnessScoringParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TimeSpan boostingDuration = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("boostingDuration"u8)) + { + boostingDuration = property.Value.GetTimeSpan("P"); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FreshnessScoringParameters(boostingDuration, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FreshnessScoringParameters)} does not support writing '{options.Format}' format."); + } + } + + FreshnessScoringParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFreshnessScoringParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FreshnessScoringParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static FreshnessScoringParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeFreshnessScoringParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringParameters.cs new file mode 100644 index 000000000000..a6de82d56a4d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/FreshnessScoringParameters.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Provides parameter values to a freshness scoring function. + public partial class FreshnessScoringParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The expiration period after which boosting will stop for a particular document. + public FreshnessScoringParameters(TimeSpan boostingDuration) + { + BoostingDuration = boostingDuration; + } + + /// Initializes a new instance of . + /// The expiration period after which boosting will stop for a particular document. + /// Keeps track of any properties unknown to the library. + internal FreshnessScoringParameters(TimeSpan boostingDuration, IDictionary serializedAdditionalRawData) + { + BoostingDuration = boostingDuration; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FreshnessScoringParameters() + { + } + + /// The expiration period after which boosting will stop for a particular document. + public TimeSpan BoostingDuration { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HighWaterMarkChangeDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/HighWaterMarkChangeDetectionPolicy.Serialization.cs new file mode 100644 index 000000000000..fb12e9ca2475 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HighWaterMarkChangeDetectionPolicy.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
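To show how the freshness scoring pieces above compose, a small sketch that assumes only the constructors in this diff (FreshnessScoringFunction(fieldName, boost, parameters) and FreshnessScoringParameters(TimeSpan boostingDuration)). The field name, boost value, and boosting window are illustrative, and attaching the function to a scoring profile is outside the scope of this section.

using System;
using Azure.Search.Documents;

internal static class FreshnessScoringSketch
{
    private static void Main()
    {
        // Boost documents whose "lastUpdated" value falls within the last seven days.
        // The writer above serializes the window with the "P" format, so it should
        // appear on the wire as an ISO 8601 duration such as "P7D" (assumption).
        var parameters = new FreshnessScoringParameters(TimeSpan.FromDays(7));

        // Per the doc comment above, boost must be a positive number not equal to 1.0.
        var freshness = new FreshnessScoringFunction("lastUpdated", 2.0, parameters);

        Console.WriteLine(freshness.Parameters.BoostingDuration); // 7.00:00:00
    }
}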
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class HighWaterMarkChangeDetectionPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HighWaterMarkChangeDetectionPolicy)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("highWaterMarkColumnName"u8); + writer.WriteStringValue(HighWaterMarkColumnName); + } + + HighWaterMarkChangeDetectionPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HighWaterMarkChangeDetectionPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeHighWaterMarkChangeDetectionPolicy(document.RootElement, options); + } + + internal static HighWaterMarkChangeDetectionPolicy DeserializeHighWaterMarkChangeDetectionPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string highWaterMarkColumnName = default; + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("highWaterMarkColumnName"u8)) + { + highWaterMarkColumnName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new HighWaterMarkChangeDetectionPolicy(odataType, serializedAdditionalRawData, highWaterMarkColumnName); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(HighWaterMarkChangeDetectionPolicy)} does not support writing '{options.Format}' format."); + } + } + + HighWaterMarkChangeDetectionPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHighWaterMarkChangeDetectionPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(HighWaterMarkChangeDetectionPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new HighWaterMarkChangeDetectionPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHighWaterMarkChangeDetectionPolicy(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HighWaterMarkChangeDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/HighWaterMarkChangeDetectionPolicy.cs new file mode 100644 index 000000000000..88b5b9b8e3c1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HighWaterMarkChangeDetectionPolicy.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a data change detection policy that captures changes based on the value + /// of a high water mark column. + /// + public partial class HighWaterMarkChangeDetectionPolicy : DataChangeDetectionPolicy + { + /// Initializes a new instance of . + /// The name of the high water mark column. + /// is null. + public HighWaterMarkChangeDetectionPolicy(string highWaterMarkColumnName) + { + Argument.AssertNotNull(highWaterMarkColumnName, nameof(highWaterMarkColumnName)); + + OdataType = "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy"; + HighWaterMarkColumnName = highWaterMarkColumnName; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + /// The name of the high water mark column. + internal HighWaterMarkChangeDetectionPolicy(string odataType, IDictionary serializedAdditionalRawData, string highWaterMarkColumnName) : base(odataType, serializedAdditionalRawData) + { + HighWaterMarkColumnName = highWaterMarkColumnName; + } + + /// Initializes a new instance of for deserialization. + internal HighWaterMarkChangeDetectionPolicy() + { + } + + /// The name of the high water mark column. + public string HighWaterMarkColumnName { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HnswAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/HnswAlgorithmConfiguration.Serialization.cs new file mode 100644 index 000000000000..9c7abbde887b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HnswAlgorithmConfiguration.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
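The change detection policy above is a one-argument construction; a brief sketch with an illustrative column name (wiring the policy into a data source definition is not part of this diff).

using System;
using Azure.Search.Documents;

internal static class ChangeDetectionSketch
{
    private static void Main()
    {
        // Rows whose "_ts" value is greater than the last recorded high water mark
        // are picked up on the next indexer run; the constructor above also pins the
        // "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy" discriminator.
        var policy = new HighWaterMarkChangeDetectionPolicy("_ts");

        Console.WriteLine(policy.HighWaterMarkColumnName); // _ts
    }
}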
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class HnswAlgorithmConfiguration : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HnswAlgorithmConfiguration)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Parameters)) + { + writer.WritePropertyName("hnswParameters"u8); + writer.WriteObjectValue(Parameters, options); + } + } + + HnswAlgorithmConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HnswAlgorithmConfiguration)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeHnswAlgorithmConfiguration(document.RootElement, options); + } + + internal static HnswAlgorithmConfiguration DeserializeHnswAlgorithmConfiguration(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + HnswParameters hnswParameters = default; + string name = default; + VectorSearchAlgorithmKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("hnswParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + hnswParameters = HnswParameters.DeserializeHnswParameters(property.Value, options); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchAlgorithmKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new HnswAlgorithmConfiguration(name, kind, serializedAdditionalRawData, hnswParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(HnswAlgorithmConfiguration)} does not support writing '{options.Format}' format."); + } + } + + HnswAlgorithmConfiguration IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHnswAlgorithmConfiguration(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(HnswAlgorithmConfiguration)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new HnswAlgorithmConfiguration FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHnswAlgorithmConfiguration(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HnswAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/HnswAlgorithmConfiguration.cs new file mode 100644 index 000000000000..b9694ba10d92 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HnswAlgorithmConfiguration.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains configuration options specific to the HNSW approximate nearest + /// neighbors algorithm used during indexing and querying. The HNSW algorithm + /// offers a tunable trade-off between search speed and accuracy. + /// + public partial class HnswAlgorithmConfiguration : VectorSearchAlgorithmConfiguration + { + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// is null. + public HnswAlgorithmConfiguration(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + Kind = VectorSearchAlgorithmKind.Hnsw; + } + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// Type of VectorSearchAlgorithmConfiguration. + /// Keeps track of any properties unknown to the library. + /// Contains the parameters specific to HNSW algorithm. + internal HnswAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind, IDictionary serializedAdditionalRawData, HnswParameters parameters) : base(name, kind, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal HnswAlgorithmConfiguration() + { + } + + /// Contains the parameters specific to HNSW algorithm. 
+ public HnswParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HnswParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/HnswParameters.Serialization.cs new file mode 100644 index 000000000000..74331a7b2346 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HnswParameters.Serialization.cs @@ -0,0 +1,194 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class HnswParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HnswParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(M)) + { + writer.WritePropertyName("m"u8); + writer.WriteNumberValue(M.Value); + } + if (Optional.IsDefined(EfConstruction)) + { + writer.WritePropertyName("efConstruction"u8); + writer.WriteNumberValue(EfConstruction.Value); + } + if (Optional.IsDefined(EfSearch)) + { + writer.WritePropertyName("efSearch"u8); + writer.WriteNumberValue(EfSearch.Value); + } + if (Optional.IsDefined(Metric)) + { + writer.WritePropertyName("metric"u8); + writer.WriteStringValue(Metric.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + HnswParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HnswParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeHnswParameters(document.RootElement, options); + } + + internal static HnswParameters DeserializeHnswParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? m = default; + int? efConstruction = default; + int? efSearch = default; + VectorSearchAlgorithmMetric? 
metric = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("m"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + m = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("efConstruction"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + efConstruction = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("efSearch"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + efSearch = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("metric"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + metric = new VectorSearchAlgorithmMetric(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new HnswParameters(m, efConstruction, efSearch, metric, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(HnswParameters)} does not support writing '{options.Format}' format."); + } + } + + HnswParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHnswParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(HnswParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static HnswParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHnswParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HnswParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/HnswParameters.cs new file mode 100644 index 000000000000..a29c5f85f63b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HnswParameters.cs @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains the parameters specific to the HNSW algorithm. + public partial class HnswParameters + { + /// + /// Keeps track of any properties unknown to the library. 
+ /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public HnswParameters() + { + } + + /// Initializes a new instance of . + /// + /// The number of bi-directional links created for every new element during + /// construction. Increasing this parameter value may improve recall and reduce + /// retrieval times for datasets with high intrinsic dimensionality at the expense + /// of increased memory consumption and longer indexing time. + /// + /// + /// The size of the dynamic list containing the nearest neighbors, which is used + /// during index time. Increasing this parameter may improve index quality, at the + /// expense of increased indexing time. At a certain point, increasing this + /// parameter leads to diminishing returns. + /// + /// + /// The size of the dynamic list containing the nearest neighbors, which is used + /// during search time. Increasing this parameter may improve search results, at + /// the expense of slower search. At a certain point, increasing this parameter + /// leads to diminishing returns. + /// + /// The similarity metric to use for vector comparisons. + /// Keeps track of any properties unknown to the library. + internal HnswParameters(int? m, int? efConstruction, int? efSearch, VectorSearchAlgorithmMetric? metric, IDictionary serializedAdditionalRawData) + { + M = m; + EfConstruction = efConstruction; + EfSearch = efSearch; + Metric = metric; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The number of bi-directional links created for every new element during + /// construction. Increasing this parameter value may improve recall and reduce + /// retrieval times for datasets with high intrinsic dimensionality at the expense + /// of increased memory consumption and longer indexing time. + /// + public int? M { get; set; } + /// + /// The size of the dynamic list containing the nearest neighbors, which is used + /// during index time. Increasing this parameter may improve index quality, at the + /// expense of increased indexing time. At a certain point, increasing this + /// parameter leads to diminishing returns. + /// + public int? EfConstruction { get; set; } + /// + /// The size of the dynamic list containing the nearest neighbors, which is used + /// during search time. Increasing this parameter may improve search results, at + /// the expense of slower search. At a certain point, increasing this parameter + /// leads to diminishing returns. + /// + public int? EfSearch { get; set; } + /// The similarity metric to use for vector comparisons. + public VectorSearchAlgorithmMetric? 
Metric { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridCountAndFacetMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/HybridCountAndFacetMode.cs similarity index 75% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/HybridCountAndFacetMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/HybridCountAndFacetMode.cs index 0920c3a606bc..3016a920953f 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridCountAndFacetMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/HybridCountAndFacetMode.cs @@ -8,9 +8,13 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { - /// Determines whether the count and facets should includes all documents that matched the search query, or only the documents that are retrieved within the 'maxTextRecallSize' window. The default value is 'countAllResults'. + /// + /// Determines whether the count and facets should include all documents that + /// matched the search query, or only the documents that are retrieved within the 'maxTextRecallSize' + /// window. The default value is 'countAllResults'. + /// public readonly partial struct HybridCountAndFacetMode : IEquatable { private readonly string _value; @@ -25,9 +29,16 @@ public HybridCountAndFacetMode(string value) private const string CountRetrievableResultsValue = "countRetrievableResults"; private const string CountAllResultsValue = "countAllResults"; - /// Only include documents that were matched within the 'maxTextRecallSize' retrieval window when computing 'count' and 'facets'. + /// + /// Only include documents that were matched within the 'maxTextRecallSize' + /// retrieval window when computing 'count' and 'facets'. + /// public static HybridCountAndFacetMode CountRetrievableResults { get; } = new HybridCountAndFacetMode(CountRetrievableResultsValue); - /// Include all documents that were matched by the search query when computing 'count' and 'facets', regardless of whether or not those documents are within the 'maxTextRecallSize' retrieval window. + /// + /// Include all documents that were matched by the search query when computing 'count' + /// and 'facets', regardless of whether or not those documents are within + /// the 'maxTextRecallSize' retrieval window. + /// public static HybridCountAndFacetMode CountAllResults { get; } = new HybridCountAndFacetMode(CountAllResultsValue); /// Determines if two values are the same. public static bool operator ==(HybridCountAndFacetMode left, HybridCountAndFacetMode right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HybridSearch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/HybridSearch.Serialization.cs new file mode 100644 index 000000000000..196836418320 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HybridSearch.Serialization.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
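Tying the two HNSW types above together, a configuration sketch. The parameter values are illustrative rather than recommendations, and the "cosine" metric string is an assumption about the service's wire values; the diff itself only shows that VectorSearchAlgorithmMetric is constructed from an arbitrary string.

using System;
using Azure.Search.Documents;

internal static class HnswConfigSketch
{
    private static void Main()
    {
        // Every HnswParameters member is optional; anything left unset falls back
        // to the service-side defaults.
        var hnsw = new HnswAlgorithmConfiguration("my-hnsw")
        {
            Parameters = new HnswParameters
            {
                M = 4,                // bi-directional links created per new element
                EfConstruction = 400, // candidate list size while building the index
                EfSearch = 500,       // candidate list size while querying
                // "cosine" is an assumed wire value; the struct is constructed from
                // a string exactly as the deserializer above does.
                Metric = new VectorSearchAlgorithmMetric("cosine")
            }
        };

        Console.WriteLine($"m={hnsw.Parameters.M}, efSearch={hnsw.Parameters.EfSearch}");
    }
}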
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class HybridSearch : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HybridSearch)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(MaxTextRecallSize)) + { + writer.WritePropertyName("maxTextRecallSize"u8); + writer.WriteNumberValue(MaxTextRecallSize.Value); + } + if (Optional.IsDefined(CountAndFacetMode)) + { + writer.WritePropertyName("countAndFacetMode"u8); + writer.WriteStringValue(CountAndFacetMode.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + HybridSearch IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HybridSearch)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeHybridSearch(document.RootElement, options); + } + + internal static HybridSearch DeserializeHybridSearch(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxTextRecallSize = default; + HybridCountAndFacetMode? 
countAndFacetMode = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTextRecallSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTextRecallSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("countAndFacetMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + countAndFacetMode = new HybridCountAndFacetMode(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new HybridSearch(maxTextRecallSize, countAndFacetMode, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(HybridSearch)} does not support writing '{options.Format}' format."); + } + } + + HybridSearch IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHybridSearch(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(HybridSearch)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static HybridSearch FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeHybridSearch(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/HybridSearch.cs b/sdk/search/Azure.Search.Documents/src/Generated/HybridSearch.cs new file mode 100644 index 000000000000..bddc99fa0dca --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/HybridSearch.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// The query parameters to configure hybrid search behaviors. + public partial class HybridSearch + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo".
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public HybridSearch() + { + } + + /// Initializes a new instance of . + /// + /// Determines the maximum number of documents to be retrieved by the text query + /// portion of a hybrid search request. Those documents will be combined with the + /// documents matching the vector queries to produce a single final list of + /// results. Choosing a larger maxTextRecallSize value will allow retrieving and + /// paging through more documents (using the top and skip parameters), at the cost + /// of higher resource utilization and higher latency. The value needs to be + /// between 1 and 10,000. Default is 1000. + /// + /// + /// Determines whether the count and facets should include all documents that + /// matched the search query, or only the documents that are retrieved within the 'maxTextRecallSize' window. + /// + /// Keeps track of any properties unknown to the library. + internal HybridSearch(int? maxTextRecallSize, HybridCountAndFacetMode? countAndFacetMode, IDictionary serializedAdditionalRawData) + { + MaxTextRecallSize = maxTextRecallSize; + CountAndFacetMode = countAndFacetMode; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// Determines the maximum number of documents to be retrieved by the text query + /// portion of a hybrid search request. Those documents will be combined with the + /// documents matching the vector queries to produce a single final list of + /// results. Choosing a larger maxTextRecallSize value will allow retrieving and + /// paging through more documents (using the top and skip parameters), at the cost + /// of higher resource utilization and higher latency. The value needs to be + /// between 1 and 10,000. Default is 1000. + /// + public int? MaxTextRecallSize { get; set; } + /// + /// Determines whether the count and facets should include all documents that + /// matched the search query, or only the documents that are retrieved within the 'maxTextRecallSize' window. + /// + public HybridCountAndFacetMode? CountAndFacetMode { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkill.Serialization.cs new file mode 100644 index 000000000000..b51688ca7153 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkill.Serialization.cs @@ -0,0 +1,241 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
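To make the two knobs on the HybridSearch model concrete, a minimal sketch follows. How the instance is attached to a query (for example via a HybridSearch property on the public search options type) is an assumption and not part of this change.

using Azure.Search.Documents;

// Illustrative configuration: widen the text-recall window, but only count and
// facet over the documents retrieved inside that window.
var hybrid = new HybridSearch
{
    MaxTextRecallSize = 2000, // allowed range 1 to 10,000; service default is 1000
    CountAndFacetMode = HybridCountAndFacetMode.CountRetrievableResults
};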
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ImageAnalysisSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ImageAnalysisSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); + } + if (Optional.IsCollectionDefined(VisualFeatures)) + { + writer.WritePropertyName("visualFeatures"u8); + writer.WriteStartArray(); + foreach (var item in VisualFeatures) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Details)) + { + writer.WritePropertyName("details"u8); + writer.WriteStartArray(); + foreach (var item in Details) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + } + + ImageAnalysisSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ImageAnalysisSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeImageAnalysisSkill(document.RootElement, options); + } + + internal static ImageAnalysisSkill DeserializeImageAnalysisSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ImageAnalysisSkillLanguage? 
defaultLanguageCode = default; + IList visualFeatures = default; + IList details = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultLanguageCode = new ImageAnalysisSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("visualFeatures"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new VisualFeature(item.GetString())); + } + visualFeatures = array; + continue; + } + if (property.NameEquals("details"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new ImageDetail(item.GetString())); + } + details = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ImageAnalysisSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + visualFeatures ?? new ChangeTrackingList(), + details ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ImageAnalysisSkill)} does not support writing '{options.Format}' format."); + } + } + + ImageAnalysisSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeImageAnalysisSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ImageAnalysisSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ImageAnalysisSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeImageAnalysisSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkill.cs new file mode 100644 index 000000000000..f9acf37cd210 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkill.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A skill that analyzes image files. It extracts a rich set of visual features + /// based on the image content. + /// + public partial class ImageAnalysisSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public ImageAnalysisSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Vision.ImageAnalysisSkill"; + VisualFeatures = new ChangeTrackingList(); + Details = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. 
Default is `en`. + /// A list of visual features. + /// A string indicating which domain-specific details to return. + internal ImageAnalysisSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, ImageAnalysisSkillLanguage? defaultLanguageCode, IList visualFeatures, IList details) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + VisualFeatures = visualFeatures; + Details = details; + } + + /// Initializes a new instance of for deserialization. + internal ImageAnalysisSkill() + { + } + + /// A value indicating which language code to use. Default is `en`. + public ImageAnalysisSkillLanguage? DefaultLanguageCode { get; set; } + /// A list of visual features. + public IList VisualFeatures { get; } + /// A string indicating which domain-specific details to return. + public IList Details { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkillLanguage.cs similarity index 99% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkillLanguage.cs index 923d8a1ed17b..f232b76108c9 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/ImageAnalysisSkillLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The language codes supported for input by ImageAnalysisSkill. public readonly partial struct ImageAnalysisSkillLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageDetail.cs b/sdk/search/Azure.Search.Documents/src/Generated/ImageDetail.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/ImageDetail.cs rename to sdk/search/Azure.Search.Documents/src/Generated/ImageDetail.cs index 3d9dfd9c6f11..fe9f8dfaf8bc 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageDetail.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/ImageDetail.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// A string indicating which domain-specific details to return. public readonly partial struct ImageDetail : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexAction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexAction.Serialization.cs new file mode 100644 index 000000000000..a456f69a8cde --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexAction.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
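For orientation, here is a sketch of how the ImageAnalysisSkill shape above might be populated. The InputFieldMappingEntry/OutputFieldMappingEntry members (Source, TargetName) and the specific ImageAnalysisSkillLanguage, VisualFeature, and ImageDetail values are assumed from sibling generated types that are not shown in this change.

using Azure.Search.Documents;

// Illustrative skill definition over normalized images extracted from documents.
var skill = new ImageAnalysisSkill(
    inputs: new[] { new InputFieldMappingEntry("image") { Source = "/document/normalized_images/*" } },
    outputs: new[] { new OutputFieldMappingEntry("tags") { TargetName = "imageTags" } })
{
    Context = "/document/normalized_images/*",
    DefaultLanguageCode = ImageAnalysisSkillLanguage.En
};
// VisualFeatures and Details are get-only lists, so they are populated in place.
skill.VisualFeatures.Add(VisualFeature.Tags);
skill.Details.Add(ImageDetail.Landmarks);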
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + internal partial class IndexAction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexAction)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(ActionType)) + { + writer.WritePropertyName("@search.action"u8); + writer.WriteStringValue(ActionType.Value.ToString()); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + IndexAction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexAction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexAction(document.RootElement, options); + } + + internal static IndexAction DeserializeIndexAction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IndexActionType? searchAction = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@search.action"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchAction = new IndexActionType(property.Value.GetString()); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new IndexAction(searchAction, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexAction)} does not support writing '{options.Format}' format."); + } + } + + IndexAction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexAction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexAction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexAction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexAction(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexAction.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexAction.cs new file mode 100644 index 000000000000..0628a89b47d6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexAction.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Models +{ + /// Represents an index action that operates on a document. + internal partial class IndexAction + { + /// Initializes a new instance of . + public IndexAction() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The operation to perform on a document in an indexing batch. + /// Additional Properties. + internal IndexAction(IndexActionType? actionType, IDictionary additionalProperties) + { + ActionType = actionType; + AdditionalProperties = additionalProperties; + } + + /// The operation to perform on a document in an indexing batch. + public IndexActionType? ActionType { get; set; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexActionType.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexActionType.cs new file mode 100644 index 000000000000..0ae53c640b84 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexActionType.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// The operation to perform on a document in an indexing batch. 
+ public readonly partial struct IndexActionType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public IndexActionType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string UploadValue = "upload"; + private const string MergeValue = "merge"; + private const string MergeOrUploadValue = "mergeOrUpload"; + private const string DeleteValue = "delete"; + + /// + /// Inserts the document into the index if it is new and updates it if it exists. + /// All fields are replaced in the update case. + /// + public static IndexActionType Upload { get; } = new IndexActionType(UploadValue); + /// + /// Merges the specified field values with an existing document. If the document + /// does not exist, the merge will fail. Any field you specify in a merge will + /// replace the existing field in the document. This also applies to collections of + /// primitive and complex types. + /// + public static IndexActionType Merge { get; } = new IndexActionType(MergeValue); + /// + /// Behaves like merge if a document with the given key already exists in the + /// index. If the document does not exist, it behaves like upload with a new + /// document. + /// + public static IndexActionType MergeOrUpload { get; } = new IndexActionType(MergeOrUploadValue); + /// + /// Removes the specified document from the index. Any field you specify in a + /// delete operation other than the key field will be ignored. If you want to + /// remove an individual field from a document, use merge instead and set the field + /// explicitly to null. + /// + public static IndexActionType Delete { get; } = new IndexActionType(DeleteValue); + /// Determines if two values are the same. + public static bool operator ==(IndexActionType left, IndexActionType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(IndexActionType left, IndexActionType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator IndexActionType(string value) => new IndexActionType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is IndexActionType other && Equals(other); + /// + public bool Equals(IndexActionType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexBatch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexBatch.Serialization.cs new file mode 100644 index 000000000000..58f69b5444f8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexBatch.Serialization.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
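IndexAction and the IndexBatch envelope that follows are internal, so the sketch below only illustrates the wire shape the generated serializers produce, written as code that would live inside the library. The document field names are invented for the example.

using System;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

// One action per document; "@search.action" plus the document fields as
// additional properties.
var action = new IndexAction { ActionType = IndexActionType.MergeOrUpload };
action.AdditionalProperties["hotelId"] = BinaryData.FromString("\"1\"");
action.AdditionalProperties["rating"] = BinaryData.FromObjectAsJson(4.5);

var batch = new IndexBatch(new[] { action });
// Serializes as: { "value": [ { "@search.action": "mergeOrUpload", "hotelId": "1", "rating": 4.5 } ] }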
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + internal partial class IndexBatch : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexBatch)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Actions) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + IndexBatch IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexBatch)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexBatch(document.RootElement, options); + } + + internal static IndexBatch DeserializeIndexBatch(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(IndexAction.DeserializeIndexAction(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IndexBatch(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexBatch)} does not support writing '{options.Format}' format."); + } + } + + IndexBatch IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexBatch(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexBatch)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexBatch FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexBatch(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexBatch.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexBatch.cs new file mode 100644 index 000000000000..17d8ab665e8b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexBatch.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents.Models +{ + /// Contains a batch of document write actions to send to the index. + internal partial class IndexBatch + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The actions in the batch. + /// is null. + public IndexBatch(IEnumerable actions) + { + Argument.AssertNotNull(actions, nameof(actions)); + + Actions = actions.ToList(); + } + + /// Initializes a new instance of . + /// The actions in the batch. + /// Keeps track of any properties unknown to the library. + internal IndexBatch(IList actions, IDictionary serializedAdditionalRawData) + { + Actions = actions; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IndexBatch() + { + } + + /// The actions in the batch. 
+ public IList Actions { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexDocumentsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexDocumentsResult.Serialization.cs new file mode 100644 index 000000000000..f38dc4ae86ae --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexDocumentsResult.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Models; + +namespace Azure.Search.Documents +{ + public partial class IndexDocumentsResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexDocumentsResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Results) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + IndexDocumentsResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexDocumentsResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexDocumentsResult(document.RootElement, options); + } + + internal static IndexDocumentsResult DeserializeIndexDocumentsResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(IndexingResult.DeserializeIndexingResult(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IndexDocumentsResult(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexDocumentsResult)} does not support writing '{options.Format}' format."); + } + } + + IndexDocumentsResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexDocumentsResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexDocumentsResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexDocumentsResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexDocumentsResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexDocumentsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexDocumentsResult.cs new file mode 100644 index 000000000000..b2b92502404c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexDocumentsResult.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
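A small sketch of consuming the result model follows. The IndexingResult members used here (Key, Succeeded, ErrorMessage) are assumed from a sibling generated type that is not shown in this change.

using System;
using Azure.Search.Documents;

// Illustrative per-document status check after an indexing request.
static void ReportFailures(IndexDocumentsResult result)
{
    foreach (var item in result.Results)
    {
        if (!item.Succeeded)
        {
            Console.WriteLine($"Document '{item.Key}' failed: {item.ErrorMessage}");
        }
    }
}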
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.Search.Documents.Models; + +namespace Azure.Search.Documents +{ + /// + /// Response containing the status of operations for all documents in the indexing + /// request. + /// + public partial class IndexDocumentsResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The list of status information for each document in the indexing request. + /// is null. + internal IndexDocumentsResult(IEnumerable results) + { + Argument.AssertNotNull(results, nameof(results)); + + Results = results.ToList(); + } + + /// Initializes a new instance of . + /// The list of status information for each document in the indexing request. + /// Keeps track of any properties unknown to the library. + internal IndexDocumentsResult(IReadOnlyList results, IDictionary serializedAdditionalRawData) + { + Results = results; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IndexDocumentsResult() + { + } + + /// The list of status information for each document in the indexing request. + public IReadOnlyList Results { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexProjectionMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexProjectionMode.cs similarity index 83% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/IndexProjectionMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/IndexProjectionMode.cs index 1c5f5257020d..29609a40fb18 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexProjectionMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexProjectionMode.cs @@ -8,9 +8,12 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { - /// Defines behavior of the index projections in relation to the rest of the indexer. + /// + /// Defines behavior of the index projections in relation to the rest of the + /// indexer. + /// public readonly partial struct IndexProjectionMode : IEquatable { private readonly string _value; @@ -25,9 +28,15 @@ public IndexProjectionMode(string value) private const string SkipIndexingParentDocumentsValue = "skipIndexingParentDocuments"; private const string IncludeIndexingParentDocumentsValue = "includeIndexingParentDocuments"; - /// The source document will be skipped from writing into the indexer's target index. + /// + /// The source document will be skipped from writing into the indexer's target + /// index. 
+ /// public static IndexProjectionMode SkipIndexingParentDocuments { get; } = new IndexProjectionMode(SkipIndexingParentDocumentsValue); - /// The source document will be written into the indexer's target index. This is the default pattern. + /// + /// The source document will be written into the indexer's target index. This is + /// the default pattern. + /// public static IndexProjectionMode IncludeIndexingParentDocuments { get; } = new IndexProjectionMode(IncludeIndexingParentDocumentsValue); /// Determines if two values are the same. public static bool operator ==(IndexProjectionMode left, IndexProjectionMode right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexStatisticsSummary.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexStatisticsSummary.Serialization.cs new file mode 100644 index 000000000000..7633eacbaa1c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexStatisticsSummary.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class IndexStatisticsSummary : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexStatisticsSummary)} does not support writing '{format}' format."); + } + + if (options.Format != "W") + { + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + } + if (options.Format != "W") + { + writer.WritePropertyName("documentCount"u8); + writer.WriteNumberValue(DocumentCount); + } + if (options.Format != "W") + { + writer.WritePropertyName("storageSize"u8); + writer.WriteNumberValue(StorageSize); + } + if (options.Format != "W" && Optional.IsDefined(VectorIndexSize)) + { + writer.WritePropertyName("vectorIndexSize"u8); + writer.WriteNumberValue(VectorIndexSize.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + IndexStatisticsSummary IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexStatisticsSummary)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexStatisticsSummary(document.RootElement, options); + } + + internal static IndexStatisticsSummary DeserializeIndexStatisticsSummary(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + long documentCount = default; + long storageSize = default; + long? vectorIndexSize = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("documentCount"u8)) + { + documentCount = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("storageSize"u8)) + { + storageSize = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("vectorIndexSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + vectorIndexSize = property.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IndexStatisticsSummary(name, documentCount, storageSize, vectorIndexSize, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexStatisticsSummary)} does not support writing '{options.Format}' format."); + } + } + + IndexStatisticsSummary IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexStatisticsSummary(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexStatisticsSummary)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexStatisticsSummary FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexStatisticsSummary(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexStatisticsSummary.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexStatisticsSummary.cs new file mode 100644 index 000000000000..9aebb1b00c3f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexStatisticsSummary.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Statistics for a given index. Statistics are collected periodically and are not guaranteed to always be up-to-date. + public partial class IndexStatisticsSummary + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal IndexStatisticsSummary() + { + } + + /// Initializes a new instance of . + /// The name of the index. + /// The number of documents in the index. + /// The amount of storage in bytes consumed by the index. + /// The amount of memory in bytes consumed by vectors in the index. + /// Keeps track of any properties unknown to the library. + internal IndexStatisticsSummary(string name, long documentCount, long storageSize, long? vectorIndexSize, IDictionary serializedAdditionalRawData) + { + Name = name; + DocumentCount = documentCount; + StorageSize = storageSize; + VectorIndexSize = vectorIndexSize; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The name of the index. + public string Name { get; } + /// The number of documents in the index. + public long DocumentCount { get; } + /// The amount of storage in bytes consumed by the index. + public long StorageSize { get; } + /// The amount of memory in bytes consumed by vectors in the index. + public long? VectorIndexSize { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionEnvironment.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionEnvironment.cs similarity index 80% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionEnvironment.cs rename to sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionEnvironment.cs index 86dae2521014..081e3d64d9fc 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionEnvironment.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionEnvironment.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Specifies the environment in which the indexer should execute. 
public readonly partial struct IndexerExecutionEnvironment : IEquatable @@ -25,9 +25,18 @@ public IndexerExecutionEnvironment(string value) private const string StandardValue = "standard"; private const string PrivateValue = "private"; - /// Indicates that the search service can determine where the indexer should execute. This is the default environment when nothing is specified and is the recommended value. + /// + /// Indicates that the search service can determine where the indexer should + /// execute. This is the default environment when nothing is specified and is the + /// recommended value. + /// public static IndexerExecutionEnvironment Standard { get; } = new IndexerExecutionEnvironment(StandardValue); - /// Indicates that the indexer should run with the environment provisioned specifically for the search service. This should only be specified as the execution environment if the indexer needs to access resources securely over shared private link resources. + /// + /// Indicates that the indexer should run with the environment provisioned + /// specifically for the search service. This should only be specified as the + /// execution environment if the indexer needs to access resources securely over + /// shared private link resources. + /// public static IndexerExecutionEnvironment Private { get; } = new IndexerExecutionEnvironment(PrivateValue); /// Determines if two values are the same. public static bool operator ==(IndexerExecutionEnvironment left, IndexerExecutionEnvironment right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionResult.Serialization.cs new file mode 100644 index 000000000000..9890f6ff9d34 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionResult.Serialization.cs @@ -0,0 +1,301 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + public partial class IndexerExecutionResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexerExecutionResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + if (options.Format != "W" && Optional.IsDefined(StatusDetail)) + { + writer.WritePropertyName("statusDetail"u8); + writer.WriteStringValue(StatusDetail.Value.ToString()); + } + if (options.Format != "W" && Optional.IsDefined(CurrentState)) + { + writer.WritePropertyName("currentState"u8); + writer.WriteObjectValue(CurrentState, options); + } + if (Optional.IsDefined(ErrorMessage)) + { + writer.WritePropertyName("errorMessage"u8); + writer.WriteStringValue(ErrorMessage); + } + if (Optional.IsDefined(StartTime)) + { + writer.WritePropertyName("startTime"u8); + writer.WriteStringValue(StartTime.Value, "O"); + } + if (Optional.IsDefined(EndTime)) + { + writer.WritePropertyName("endTime"u8); + writer.WriteStringValue(EndTime.Value, "O"); + } + writer.WritePropertyName("errors"u8); + writer.WriteStartArray(); + foreach (var item in Errors) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + writer.WritePropertyName("warnings"u8); + writer.WriteStartArray(); + foreach (var item in Warnings) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + writer.WritePropertyName("itemsProcessed"u8); + writer.WriteNumberValue(ItemCount); + writer.WritePropertyName("itemsFailed"u8); + writer.WriteNumberValue(FailedItemCount); + if (Optional.IsDefined(InitialTrackingState)) + { + writer.WritePropertyName("initialTrackingState"u8); + writer.WriteStringValue(InitialTrackingState); + } + if (Optional.IsDefined(FinalTrackingState)) + { + writer.WritePropertyName("finalTrackingState"u8); + writer.WriteStringValue(FinalTrackingState); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + IndexerExecutionResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexerExecutionResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexerExecutionResult(document.RootElement, options); + } + + internal static IndexerExecutionResult DeserializeIndexerExecutionResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IndexerExecutionStatus status = default; + IndexerExecutionStatusDetail? statusDetail = default; + Search.Documents.Indexes.Models.IndexerState currentState = default; + string errorMessage = default; + DateTimeOffset? startTime = default; + DateTimeOffset? 
endTime = default; + IReadOnlyList errors = default; + IReadOnlyList warnings = default; + int itemsProcessed = default; + int itemsFailed = default; + string initialTrackingState = default; + string finalTrackingState = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("status"u8)) + { + status = new IndexerExecutionStatus(property.Value.GetString()); + continue; + } + if (property.NameEquals("statusDetail"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + statusDetail = new IndexerExecutionStatusDetail(property.Value.GetString()); + continue; + } + if (property.NameEquals("currentState"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + currentState = Search.Documents.Indexes.Models.IndexerState.DeserializeIndexerState(property.Value, options); + continue; + } + if (property.NameEquals("errorMessage"u8)) + { + errorMessage = property.Value.GetString(); + continue; + } + if (property.NameEquals("startTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + startTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("endTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + endTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("errors"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchIndexerError.DeserializeSearchIndexerError(item, options)); + } + errors = array; + continue; + } + if (property.NameEquals("warnings"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchIndexerWarning.DeserializeSearchIndexerWarning(item, options)); + } + warnings = array; + continue; + } + if (property.NameEquals("itemsProcessed"u8)) + { + itemsProcessed = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("itemsFailed"u8)) + { + itemsFailed = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("initialTrackingState"u8)) + { + initialTrackingState = property.Value.GetString(); + continue; + } + if (property.NameEquals("finalTrackingState"u8)) + { + finalTrackingState = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IndexerExecutionResult( + status, + statusDetail, + currentState, + errorMessage, + startTime, + endTime, + errors, + warnings, + itemsProcessed, + itemsFailed, + initialTrackingState, + finalTrackingState, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexerExecutionResult)} does not support writing '{options.Format}' format."); + } + } + + IndexerExecutionResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexerExecutionResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexerExecutionResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexerExecutionResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexerExecutionResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionResult.cs new file mode 100644 index 000000000000..8630644e32de --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionResult.cs @@ -0,0 +1,142 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// Represents the result of an individual indexer execution. + public partial class IndexerExecutionResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The outcome of this indexer execution. + /// The item-level indexing errors. + /// The item-level indexing warnings. + /// + /// The number of items that were processed during this indexer execution. This + /// includes both successfully processed items and items where indexing was + /// attempted but failed. + /// + /// The number of items that failed to be indexed during this indexer execution. + /// or is null. + internal IndexerExecutionResult(IndexerExecutionStatus status, IEnumerable errors, IEnumerable warnings, int itemCount, int failedItemCount) + { + Argument.AssertNotNull(errors, nameof(errors)); + Argument.AssertNotNull(warnings, nameof(warnings)); + + Status = status; + Errors = errors.ToList(); + Warnings = warnings.ToList(); + ItemCount = itemCount; + FailedItemCount = failedItemCount; + } + + /// Initializes a new instance of . + /// The outcome of this indexer execution. 
+ /// The outcome of this indexer execution. + /// All of the state that defines and dictates the indexer's current execution. + /// The error message indicating the top-level error, if any. + /// The start time of this indexer execution. + /// The end time of this indexer execution, if the execution has already completed. + /// The item-level indexing errors. + /// The item-level indexing warnings. + /// + /// The number of items that were processed during this indexer execution. This + /// includes both successfully processed items and items where indexing was + /// attempted but failed. + /// + /// The number of items that failed to be indexed during this indexer execution. + /// Change tracking state with which an indexer execution started. + /// Change tracking state with which an indexer execution finished. + /// Keeps track of any properties unknown to the library. + internal IndexerExecutionResult(IndexerExecutionStatus status, IndexerExecutionStatusDetail? statusDetail, Search.Documents.Indexes.Models.IndexerState currentState, string errorMessage, DateTimeOffset? startTime, DateTimeOffset? endTime, IReadOnlyList errors, IReadOnlyList warnings, int itemCount, int failedItemCount, string initialTrackingState, string finalTrackingState, IDictionary serializedAdditionalRawData) + { + Status = status; + StatusDetail = statusDetail; + CurrentState = currentState; + ErrorMessage = errorMessage; + StartTime = startTime; + EndTime = endTime; + Errors = errors; + Warnings = warnings; + ItemCount = itemCount; + FailedItemCount = failedItemCount; + InitialTrackingState = initialTrackingState; + FinalTrackingState = finalTrackingState; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IndexerExecutionResult() + { + } + + /// The outcome of this indexer execution. + public IndexerExecutionStatus Status { get; } + /// The outcome of this indexer execution. + public IndexerExecutionStatusDetail? StatusDetail { get; } + /// All of the state that defines and dictates the indexer's current execution. + public Search.Documents.Indexes.Models.IndexerState CurrentState { get; } + /// The error message indicating the top-level error, if any. + public string ErrorMessage { get; } + /// The start time of this indexer execution. + public DateTimeOffset? StartTime { get; } + /// The end time of this indexer execution, if the execution has already completed. + public DateTimeOffset? EndTime { get; } + /// The item-level indexing errors. + public IReadOnlyList Errors { get; } + /// The item-level indexing warnings. + public IReadOnlyList Warnings { get; } + /// + /// The number of items that were processed during this indexer execution. This + /// includes both successfully processed items and items where indexing was + /// attempted but failed. + /// + public int ItemCount { get; } + /// The number of items that failed to be indexed during this indexer execution. + public int FailedItemCount { get; } + /// Change tracking state with which an indexer execution started. + public string InitialTrackingState { get; } + /// Change tracking state with which an indexer execution finished. 
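// Illustrative helper (hypothetical, not part of the generated client): summarizing an execution
// result via the properties declared in this class. ItemCount counts every item for which indexing
// was attempted; FailedItemCount is the subset that failed.
using Azure.Search.Documents;

internal static class IndexerExecutionResultExtensions
{
    public static string Summarize(this IndexerExecutionResult result)
    {
        int succeeded = result.ItemCount - result.FailedItemCount;
        return $"{result.Status}: {succeeded}/{result.ItemCount} items indexed, "
             + $"{result.Errors.Count} errors, {result.Warnings.Count} warnings";
    }
}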
+ public string FinalTrackingState { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionStatus.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionStatus.cs new file mode 100644 index 000000000000..e56266318ebf --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionStatus.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Represents the status of an individual indexer execution. + public readonly partial struct IndexerExecutionStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public IndexerExecutionStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string TransientFailureValue = "transientFailure"; + private const string SuccessValue = "success"; + private const string InProgressValue = "inProgress"; + private const string ResetValue = "reset"; + + /// + /// An indexer invocation has failed, but the failure may be transient. Indexer + /// invocations will continue per schedule. + /// + public static IndexerExecutionStatus TransientFailure { get; } = new IndexerExecutionStatus(TransientFailureValue); + /// Indexer execution completed successfully. + public static IndexerExecutionStatus Success { get; } = new IndexerExecutionStatus(SuccessValue); + /// Indexer execution is in progress. + public static IndexerExecutionStatus InProgress { get; } = new IndexerExecutionStatus(InProgressValue); + /// Indexer has been reset. + public static IndexerExecutionStatus Reset { get; } = new IndexerExecutionStatus(ResetValue); + /// Determines if two values are the same. + public static bool operator ==(IndexerExecutionStatus left, IndexerExecutionStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(IndexerExecutionStatus left, IndexerExecutionStatus right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator IndexerExecutionStatus(string value) => new IndexerExecutionStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is IndexerExecutionStatus other && Equals(other); + /// + public bool Equals(IndexerExecutionStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatusDetail.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionStatusDetail.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatusDetail.cs rename to sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionStatusDetail.cs index b66b5527d807..d9762bad0e30 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatusDetail.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerExecutionStatusDetail.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Details the status of an individual indexer execution. public readonly partial struct IndexerExecutionStatusDetail : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexerState.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerState.Serialization.cs new file mode 100644 index 000000000000..105a07d6a26f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerState.Serialization.cs @@ -0,0 +1,251 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class IndexerState : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.IndexerState)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(Mode)) + { + writer.WritePropertyName("mode"u8); + writer.WriteStringValue(Mode.Value.ToString()); + } + if (options.Format != "W" && Optional.IsDefined(AllDocsInitialChangeTrackingState)) + { + writer.WritePropertyName("allDocsInitialChangeTrackingState"u8); + writer.WriteStringValue(AllDocsInitialChangeTrackingState); + } + if (options.Format != "W" && Optional.IsDefined(AllDocsFinalChangeTrackingState)) + { + writer.WritePropertyName("allDocsFinalChangeTrackingState"u8); + writer.WriteStringValue(AllDocsFinalChangeTrackingState); + } + if (options.Format != "W" && Optional.IsDefined(ResetDocsInitialChangeTrackingState)) + { + writer.WritePropertyName("resetDocsInitialChangeTrackingState"u8); + writer.WriteStringValue(ResetDocsInitialChangeTrackingState); + } + if (options.Format != "W" && Optional.IsDefined(ResetDocsFinalChangeTrackingState)) + { + writer.WritePropertyName("resetDocsFinalChangeTrackingState"u8); + writer.WriteStringValue(ResetDocsFinalChangeTrackingState); + } + if (options.Format != "W" && Optional.IsCollectionDefined(ResetDocumentKeys)) + { + writer.WritePropertyName("resetDocumentKeys"u8); + writer.WriteStartArray(); + foreach (var item in ResetDocumentKeys) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && Optional.IsCollectionDefined(ResetDataSourceDocumentIds)) + { + writer.WritePropertyName("resetDatasourceDocumentIds"u8); + writer.WriteStartArray(); + foreach (var item in ResetDataSourceDocumentIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.IndexerState IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.IndexerState)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.IndexerState.DeserializeIndexerState(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.IndexerState DeserializeIndexerState(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Search.Documents.IndexingMode? 
mode = default; + string allDocsInitialChangeTrackingState = default; + string allDocsFinalChangeTrackingState = default; + string resetDocsInitialChangeTrackingState = default; + string resetDocsFinalChangeTrackingState = default; + IReadOnlyList resetDocumentKeys = default; + IReadOnlyList resetDatasourceDocumentIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("mode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + mode = new Search.Documents.IndexingMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("allDocsInitialChangeTrackingState"u8)) + { + allDocsInitialChangeTrackingState = property.Value.GetString(); + continue; + } + if (property.NameEquals("allDocsFinalChangeTrackingState"u8)) + { + allDocsFinalChangeTrackingState = property.Value.GetString(); + continue; + } + if (property.NameEquals("resetDocsInitialChangeTrackingState"u8)) + { + resetDocsInitialChangeTrackingState = property.Value.GetString(); + continue; + } + if (property.NameEquals("resetDocsFinalChangeTrackingState"u8)) + { + resetDocsFinalChangeTrackingState = property.Value.GetString(); + continue; + } + if (property.NameEquals("resetDocumentKeys"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + resetDocumentKeys = array; + continue; + } + if (property.NameEquals("resetDatasourceDocumentIds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + resetDatasourceDocumentIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.IndexerState( + mode, + allDocsInitialChangeTrackingState, + allDocsFinalChangeTrackingState, + resetDocsInitialChangeTrackingState, + resetDocsFinalChangeTrackingState, + resetDocumentKeys ?? new ChangeTrackingList(), + resetDatasourceDocumentIds ?? new ChangeTrackingList(), + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.IndexerState)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.IndexerState IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.IndexerState.DeserializeIndexerState(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.IndexerState)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static Search.Documents.Indexes.Models.IndexerState FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.IndexerState.DeserializeIndexerState(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexerState.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerState.cs new file mode 100644 index 000000000000..d8424ad9cbd3 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerState.cs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Represents all of the state that defines and dictates the indexer's current + /// execution. + /// + public partial class IndexerState + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal IndexerState() + { + ResetDocumentKeys = new ChangeTrackingList(); + ResetDataSourceDocumentIds = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The mode the indexer is running in. + /// + /// Change tracking state used when indexing starts on all documents in the + /// datasource. + /// + /// + /// Change tracking state value when indexing finishes on all documents in the + /// datasource. + /// + /// + /// Change tracking state used when indexing starts on select, reset documents in + /// the datasource. + /// + /// + /// Change tracking state value when indexing finishes on select, reset documents + /// in the datasource. + /// + /// + /// The list of document keys that have been reset. The document key is the + /// document's unique identifier for the data in the search index. 
The indexer will + /// prioritize selectively re-ingesting these keys. + /// + /// + /// The list of datasource document ids that have been reset. The datasource + /// document id is the unique identifier for the data in the datasource. The + /// indexer will prioritize selectively re-ingesting these ids. + /// + /// Keeps track of any properties unknown to the library. + internal IndexerState(Search.Documents.IndexingMode? mode, string allDocsInitialChangeTrackingState, string allDocsFinalChangeTrackingState, string resetDocsInitialChangeTrackingState, string resetDocsFinalChangeTrackingState, IReadOnlyList resetDocumentKeys, IReadOnlyList resetDataSourceDocumentIds, IDictionary serializedAdditionalRawData) + { + Mode = mode; + AllDocsInitialChangeTrackingState = allDocsInitialChangeTrackingState; + AllDocsFinalChangeTrackingState = allDocsFinalChangeTrackingState; + ResetDocsInitialChangeTrackingState = resetDocsInitialChangeTrackingState; + ResetDocsFinalChangeTrackingState = resetDocsFinalChangeTrackingState; + ResetDocumentKeys = resetDocumentKeys; + ResetDataSourceDocumentIds = resetDataSourceDocumentIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The mode the indexer is running in. + public Search.Documents.IndexingMode? Mode { get; } + /// + /// The list of document keys that have been reset. The document key is the + /// document's unique identifier for the data in the search index. The indexer will + /// prioritize selectively re-ingesting these keys. + /// + public IReadOnlyList ResetDocumentKeys { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexerStatus.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexerStatus.cs new file mode 100644 index 000000000000..2d1d1e60f859 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexerStatus.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Represents the overall indexer status. + public readonly partial struct IndexerStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public IndexerStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string UnknownValue = "unknown"; + private const string ErrorValue = "error"; + private const string RunningValue = "running"; + + /// Indicates that the indexer is in an unknown state. + public static IndexerStatus Unknown { get; } = new IndexerStatus(UnknownValue); + /// + /// Indicates that the indexer experienced an error that cannot be corrected + /// without human intervention. + /// + public static IndexerStatus Error { get; } = new IndexerStatus(ErrorValue); + /// Indicates that the indexer is running normally. + public static IndexerStatus Running { get; } = new IndexerStatus(RunningValue); + /// Determines if two values are the same. + public static bool operator ==(IndexerStatus left, IndexerStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(IndexerStatus left, IndexerStatus right) => !left.Equals(right); + /// Converts a to a . 
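// Illustrative sketch of the extensible-enum pattern used by IndexerStatus, IndexerExecutionStatus,
// and the other string-backed structs in this change: values convert implicitly from strings and
// compare case-insensitively, so values the library does not know about still round-trip.
using System;
using Azure.Search.Documents;

IndexerStatus status = "Running";                   // implicit conversion from string
Console.WriteLine(status == IndexerStatus.Running); // True: comparison ignores case
Console.WriteLine(new IndexerExecutionStatus("transientFailure") == IndexerExecutionStatus.TransientFailure); // True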
+ public static implicit operator IndexerStatus(string value) => new IndexerStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is IndexerStatus other && Equals(other); + /// + public bool Equals(IndexerStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Indexers.cs b/sdk/search/Azure.Search.Documents/src/Generated/Indexers.cs new file mode 100644 index 000000000000..439614eebc65 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Indexers.cs @@ -0,0 +1,1188 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.Search.Documents.Models; + +namespace Azure.Search.Documents +{ + // Data plane generated sub-client. + /// The Indexers sub-client. + public partial class Indexers + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of Indexers for mocking. + protected Indexers() + { + } + + /// Initializes a new instance of Indexers. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// Service host. + /// The API version to use for this operation. + internal Indexers(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _apiVersion = apiVersion; + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Resets the change tracking state associated with an indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
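// Illustrative sketch (hypothetical variable names): calling the Reset protocol method defined
// below. The Indexers sub-client has an internal constructor, so the instance is assumed to be
// handed out by the library's top-level service client.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;

internal static class ResetIndexerSample
{
    public static async Task ResetAsync(Indexers indexers)
    {
        // Resets the change tracking state associated with the indexer.
        Response response = await indexers.ResetAsync("hotels-indexer");
        Console.WriteLine($"Reset returned HTTP {response.Status}");
    }
}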
+ /// + public virtual async Task ResetAsync(string indexerName, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Reset"); + scope.Start(); + try + { + using HttpMessage message = CreateResetRequest(indexerName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Resets the change tracking state associated with an indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Reset(string indexerName, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Reset"); + scope.Start(); + try + { + using HttpMessage message = CreateResetRequest(indexerName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Resets specific documents in the datasource to be selectively re-ingested by + /// the indexer. + /// + /// The name of the indexer. + /// + /// The keys or ids of the documents to be re-ingested. If keys are provided, the + /// document key field must be specified in the indexer configuration. If ids are + /// provided, the document key field is ignored. + /// + /// + /// If false, keys or ids will be appended to existing ones. If true, only the keys + /// or ids in this payload will be queued to be re-ingested. + /// + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task ResetDocsAsync(string indexerName, ResetDocumentOptions keysOrIds = null, bool? overwrite = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using RequestContent content = keysOrIds?.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await ResetDocsAsync(indexerName, content, overwrite, context).ConfigureAwait(false); + return response; + } + + /// + /// Resets specific documents in the datasource to be selectively re-ingested by + /// the indexer. + /// + /// The name of the indexer. + /// + /// The keys or ids of the documents to be re-ingested. If keys are provided, the + /// document key field must be specified in the indexer configuration. If ids are + /// provided, the document key field is ignored. + /// + /// + /// If false, keys or ids will be appended to existing ones. If true, only the keys + /// or ids in this payload will be queued to be re-ingested. + /// + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
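// Illustrative sketch: queuing specific document keys for selective re-ingestion with the
// ResetDocs protocol overload. The "documentKeys" property name is assumed from the REST contract;
// the strongly typed ResetDocumentOptions overload above could be used instead.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Core;
using Azure.Search.Documents;

internal static class ResetDocsSample
{
    public static async Task ResetDocumentsAsync(Indexers indexers)
    {
        // overwrite: false appends these keys to any keys already queued for re-ingestion.
        using RequestContent content = RequestContent.Create(
            BinaryData.FromObjectAsJson(new { documentKeys = new[] { "101", "102" } }));
        Response response = await indexers.ResetDocsAsync("hotels-indexer", content, overwrite: false);
        Console.WriteLine($"ResetDocs returned HTTP {response.Status}");
    }
}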
+ /// + public virtual Response ResetDocs(string indexerName, ResetDocumentOptions keysOrIds = null, bool? overwrite = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using RequestContent content = keysOrIds?.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = ResetDocs(indexerName, content, overwrite, context); + return response; + } + + /// + /// [Protocol Method] Resets specific documents in the datasource to be selectively re-ingested by + /// the indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The content to send as the body of the request. + /// + /// If false, keys or ids will be appended to existing ones. If true, only the keys + /// or ids in this payload will be queued to be re-ingested. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task ResetDocsAsync(string indexerName, RequestContent content, bool? overwrite = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.ResetDocs"); + scope.Start(); + try + { + using HttpMessage message = CreateResetDocsRequest(indexerName, content, overwrite, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Resets specific documents in the datasource to be selectively re-ingested by + /// the indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The content to send as the body of the request. + /// + /// If false, keys or ids will be appended to existing ones. If true, only the keys + /// or ids in this payload will be queued to be re-ingested. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response ResetDocs(string indexerName, RequestContent content, bool? 
overwrite = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.ResetDocs"); + scope.Start(); + try + { + using HttpMessage message = CreateResetDocsRequest(indexerName, content, overwrite, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Runs an indexer on-demand. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task RunAsync(string indexerName, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Run"); + scope.Start(); + try + { + using HttpMessage message = CreateRunRequest(indexerName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Runs an indexer on-demand. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Run(string indexerName, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Run"); + scope.Start(); + try + { + using HttpMessage message = CreateRunRequest(indexerName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new indexer or updates an indexer if it already exists. + /// The name of the indexer. + /// The definition of the indexer to create or update. + /// Ignores cache reset requirements. + /// Disables cache reprocessing change detection. + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> CreateOrUpdateAsync(string indexerName, SearchIndexer indexer, bool? skipIndexerResetRequirementForCache = null, bool? 
disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + Argument.AssertNotNull(indexer, nameof(indexer)); + + using RequestContent content = indexer.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateOrUpdateAsync(indexerName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context).ConfigureAwait(false); + return Response.FromValue(SearchIndexer.FromResponse(response), response); + } + + /// Creates a new indexer or updates an indexer if it already exists. + /// The name of the indexer. + /// The definition of the indexer to create or update. + /// Ignores cache reset requirements. + /// Disables cache reprocessing change detection. + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response CreateOrUpdate(string indexerName, SearchIndexer indexer, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + Argument.AssertNotNull(indexer, nameof(indexer)); + + using RequestContent content = indexer.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateOrUpdate(indexerName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context); + return Response.FromValue(SearchIndexer.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new indexer or updates an indexer if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The content to send as the body of the request. + /// Ignores cache reset requirements. + /// Disables cache reprocessing change detection. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateOrUpdateAsync(string indexerName, RequestContent content, bool? skipIndexerResetRequirementForCache = null, bool? 
disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(indexerName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new indexer or updates an indexer if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The content to send as the body of the request. + /// Ignores cache reset requirements. + /// Disables cache reprocessing change detection. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response CreateOrUpdate(string indexerName, RequestContent content, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(indexerName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes an indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the indexer. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
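// Illustrative sketch: upserting an indexer with the strongly typed CreateOrUpdate overload shown
// above. The definition is taken as a parameter; the model namespaces are assumptions.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Indexes.Models;

internal static class UpsertIndexerSample
{
    public static async Task UpsertAsync(Indexers indexers, SearchIndexer definition)
    {
        Response<SearchIndexer> result = await indexers.CreateOrUpdateAsync(
            definition.Name, definition, skipIndexerResetRequirementForCache: false);
        Console.WriteLine($"Upserted indexer '{result.Value.Name}'");
    }
}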
+ /// + public virtual async Task DeleteAsync(string indexerName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(indexerName, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes an indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the indexer. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(string indexerName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(indexerName, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves an indexer definition. + /// The name of the indexer. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetIndexerAsync(string indexerName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetIndexerAsync(indexerName, context).ConfigureAwait(false); + return Response.FromValue(SearchIndexer.FromResponse(response), response); + } + + /// Retrieves an indexer definition. + /// The name of the indexer. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetIndexer(string indexerName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetIndexer(indexerName, context); + return Response.FromValue(SearchIndexer.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieves an indexer definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. 
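// Illustrative sketch: deleting an indexer only when the caller's copy is still current, using the
// optional MatchConditions parameter on the Delete overloads above. The ETag value comes from a
// previously retrieved definition.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;

internal static class DeleteIndexerSample
{
    public static async Task DeleteIfUnchangedAsync(Indexers indexers, ETag currentETag)
    {
        var conditions = new MatchConditions { IfMatch = currentETag };
        Response response = await indexers.DeleteAsync("hotels-indexer", conditions);
        Console.WriteLine($"Delete returned HTTP {response.Status}");
    }
}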
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetIndexerAsync(string indexerName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.GetIndexer"); + scope.Start(); + try + { + using HttpMessage message = CreateGetIndexerRequest(indexerName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieves an indexer definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetIndexer(string indexerName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.GetIndexer"); + scope.Start(); + try + { + using HttpMessage message = CreateGetIndexerRequest(indexerName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Lists all indexers available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual async Task> GetIndexersAsync(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetIndexersAsync(select, context).ConfigureAwait(false); + return Response.FromValue(ListIndexersResult.FromResponse(response), response); + } + + /// Lists all indexers available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual Response GetIndexers(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetIndexers(select, context); + return Response.FromValue(ListIndexersResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Lists all indexers available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. 
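// --- Editor-added illustrative sketch (not generated code) ---------------------------------
// The GetIndexers convenience overloads in this region accept an OData-style $select; passing
// "name" keeps the payload small when only indexer names are needed. Assumes the generated
// ListIndexersResult model exposes the list as Indexers; "indexers" is a hypothetical instance
// of this internal sub-client.
internal static async Task PrintIndexerNamesAsync(Indexers indexers)
{
    Response<ListIndexersResult> result = await indexers.GetIndexersAsync(select: "name");
    foreach (SearchIndexer indexer in result.Value.Indexers)
    {
        Console.WriteLine(indexer.Name);
    }
}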
+ /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetIndexersAsync(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("Indexers.GetIndexers"); + scope.Start(); + try + { + using HttpMessage message = CreateGetIndexersRequest(select, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Lists all indexers available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetIndexers(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("Indexers.GetIndexers"); + scope.Start(); + try + { + using HttpMessage message = CreateGetIndexersRequest(select, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new indexer. + /// The definition of the indexer to create. + /// The cancellation token to use. + /// is null. + /// + public virtual async Task> CreateAsync(SearchIndexer indexer, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(indexer, nameof(indexer)); + + using RequestContent content = indexer.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(content, context).ConfigureAwait(false); + return Response.FromValue(SearchIndexer.FromResponse(response), response); + } + + /// Creates a new indexer. + /// The definition of the indexer to create. + /// The cancellation token to use. + /// is null. + /// + public virtual Response Create(SearchIndexer indexer, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(indexer, nameof(indexer)); + + using RequestContent content = indexer.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(content, context); + return Response.FromValue(SearchIndexer.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
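// --- Editor-added illustrative sketch (not generated code) ---------------------------------
// The strongly typed Create overload above takes the public SearchIndexer model directly; the
// three-argument constructor (name, data source, target index) is the shape exposed by the
// public Azure.Search.Documents.Indexes.Models surface, and the names below are placeholders.
internal static async Task<SearchIndexer> CreateSampleIndexerAsync(Indexers indexers)
{
    var definition = new SearchIndexer("hotels-indexer", "hotels-datasource", "hotels-index");
    Response<SearchIndexer> created = await indexers.CreateAsync(definition);
    return created.Value; // the service echoes back the created definition
}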
+ /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Returns the current status and execution history of an indexer. + /// The name of the indexer. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetStatusAsync(string indexerName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetStatusAsync(indexerName, context).ConfigureAwait(false); + return Response.FromValue(SearchIndexerStatus.FromResponse(response), response); + } + + /// Returns the current status and execution history of an indexer. + /// The name of the indexer. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetStatus(string indexerName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetStatus(indexerName, context); + return Response.FromValue(SearchIndexerStatus.FromResponse(response), response); + } + + /// + /// [Protocol Method] Returns the current status and execution history of an indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
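// --- Editor-added illustrative sketch (not generated code) ---------------------------------
// GetStatusAsync above returns the strongly typed SearchIndexerStatus; Status reflects the
// overall indexer state and LastResult (null before the first run) the most recent execution.
// Property names are recalled from the public model surface, so treat this as a sketch.
internal static async Task ReportIndexerStatusAsync(Indexers indexers, string name)
{
    Response<SearchIndexerStatus> status = await indexers.GetStatusAsync(name);
    Console.WriteLine($"{name}: {status.Value.Status}, last run: {status.Value.LastResult?.Status}");
}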
+ /// + public virtual async Task GetStatusAsync(string indexerName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.GetStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetStatusRequest(indexerName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Returns the current status and execution history of an indexer. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the indexer. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetStatus(string indexerName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexerName, nameof(indexerName)); + + using var scope = ClientDiagnostics.CreateScope("Indexers.GetStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetStatusRequest(indexerName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateResetRequest(string indexerName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers('", false); + uri.AppendPath(indexerName, true); + uri.AppendPath("')/search.reset", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateResetDocsRequest(string indexerName, RequestContent content, bool? 
overwrite, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers('", false); + uri.AppendPath(indexerName, true); + uri.AppendPath("')/search.resetdocs", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (overwrite != null) + { + uri.AppendQuery("overwrite", overwrite.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateRunRequest(string indexerName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers('", false); + uri.AppendPath(indexerName, true); + uri.AppendPath("')/search.run", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateOrUpdateRequest(string indexerName, RequestContent content, bool? skipIndexerResetRequirementForCache, bool? disableCacheReprocessingChangeDetection, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200201); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers('", false); + uri.AppendPath(indexerName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (skipIndexerResetRequirementForCache != null) + { + uri.AppendQuery("ignoreResetRequirements", skipIndexerResetRequirementForCache.Value, true); + } + if (disableCacheReprocessingChangeDetection != null) + { + uri.AppendQuery("disableCacheReprocessingChangeDetection", disableCacheReprocessingChangeDetection.Value, true); + } + request.Uri = uri; + request.Headers.Add("Prefer", "return=representation"); + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(string indexerName, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204404); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers('", false); + uri.AppendPath(indexerName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + return message; + } + + internal HttpMessage CreateGetIndexerRequest(string indexerName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers('", 
false); + uri.AppendPath(indexerName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetIndexersRequest(string select, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (select != null) + { + uri.AppendQuery("$select", select, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier201); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetStatusRequest(string indexerName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexers('", false); + uri.AppendPath(indexerName, true); + uri.AppendPath("')/search.status", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier204; + private static ResponseClassifier ResponseClassifier204 => _responseClassifier204 ??= new StatusCodeClassifier(stackalloc ushort[] { 204 }); + private static ResponseClassifier _responseClassifier202; + private static ResponseClassifier ResponseClassifier202 => _responseClassifier202 ??= new StatusCodeClassifier(stackalloc ushort[] { 202 }); + private static ResponseClassifier _responseClassifier200201; + private static ResponseClassifier ResponseClassifier200201 => _responseClassifier200201 ??= new StatusCodeClassifier(stackalloc ushort[] { 200, 201 }); + private static ResponseClassifier _responseClassifier204404; + private static ResponseClassifier ResponseClassifier204404 => _responseClassifier204404 ??= new StatusCodeClassifier(stackalloc ushort[] { 204, 404 }); + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier201; + private static ResponseClassifier ResponseClassifier201 => _responseClassifier201 ??= new StatusCodeClassifier(stackalloc ushort[] { 201 }); + } +} diff --git 
a/sdk/search/Azure.Search.Documents/src/Generated/IndexersRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexersRestClient.cs deleted file mode 100644 index 3460520aaf1f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/IndexersRestClient.cs +++ /dev/null @@ -1,697 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Indexes.Models; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents -{ - internal partial class IndexersRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of IndexersRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , or is null. - public IndexersRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateResetRequest(string indexerName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers('", false); - uri.AppendPath(indexerName, true); - uri.AppendPath("')/search.reset", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Resets the change tracking state associated with an indexer. - /// The name of the indexer to reset. - /// The cancellation token to use. - /// is null. - public async Task ResetAsync(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateResetRequest(indexerName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Resets the change tracking state associated with an indexer. - /// The name of the indexer to reset. - /// The cancellation token to use. - /// is null. 
- public Response Reset(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateResetRequest(indexerName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateResetDocsRequest(string indexerName, bool? overwrite, ResetDocumentOptions keysOrIds) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers('", false); - uri.AppendPath(indexerName, true); - uri.AppendPath("')/search.resetdocs", false); - if (overwrite != null) - { - uri.AppendQuery("overwrite", overwrite.Value, true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - if (keysOrIds != null) - { - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(keysOrIds); - request.Content = content; - } - return message; - } - - /// Resets specific documents in the datasource to be selectively re-ingested by the indexer. - /// The name of the indexer to reset documents for. - /// If false, keys or ids will be appended to existing ones. If true, only the keys or ids in this payload will be queued to be re-ingested. - /// The to use. - /// The cancellation token to use. - /// is null. - public async Task ResetDocsAsync(string indexerName, bool? overwrite = null, ResetDocumentOptions keysOrIds = null, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateResetDocsRequest(indexerName, overwrite, keysOrIds); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Resets specific documents in the datasource to be selectively re-ingested by the indexer. - /// The name of the indexer to reset documents for. - /// If false, keys or ids will be appended to existing ones. If true, only the keys or ids in this payload will be queued to be re-ingested. - /// The to use. - /// The cancellation token to use. - /// is null. - public Response ResetDocs(string indexerName, bool? 
overwrite = null, ResetDocumentOptions keysOrIds = null, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateResetDocsRequest(indexerName, overwrite, keysOrIds); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateRunRequest(string indexerName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers('", false); - uri.AppendPath(indexerName, true); - uri.AppendPath("')/search.run", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Runs an indexer on-demand. - /// The name of the indexer to run. - /// The cancellation token to use. - /// is null. - public async Task RunAsync(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateRunRequest(indexerName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 202: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Runs an indexer on-demand. - /// The name of the indexer to run. - /// The cancellation token to use. - /// is null. - public Response Run(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateRunRequest(indexerName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 202: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateCreateOrUpdateRequest(string indexerName, SearchIndexer indexer, string ifMatch, string ifNoneMatch, bool? skipIndexerResetRequirementForCache, bool? 
disableCacheReprocessingChangeDetection) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Put; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers('", false); - uri.AppendPath(indexerName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - if (skipIndexerResetRequirementForCache != null) - { - uri.AppendQuery("ignoreResetRequirements", skipIndexerResetRequirementForCache.Value, true); - } - if (disableCacheReprocessingChangeDetection != null) - { - uri.AppendQuery("disableCacheReprocessingChangeDetection", disableCacheReprocessingChangeDetection.Value, true); - } - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Prefer", "return=representation"); - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(indexer); - request.Content = content; - return message; - } - - /// Creates a new indexer or updates an indexer if it already exists. - /// The name of the indexer to create or update. - /// The definition of the indexer to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// Ignores cache reset requirements. - /// Disables cache reprocessing change detection. - /// The cancellation token to use. - /// or is null. - public async Task> CreateOrUpdateAsync(string indexerName, SearchIndexer indexer, string ifMatch = null, string ifNoneMatch = null, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - if (indexer == null) - { - throw new ArgumentNullException(nameof(indexer)); - } - - using var message = CreateCreateOrUpdateRequest(indexerName, indexer, ifMatch, ifNoneMatch, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndexer value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexer.DeserializeSearchIndexer(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new indexer or updates an indexer if it already exists. - /// The name of the indexer to create or update. - /// The definition of the indexer to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// Ignores cache reset requirements. 
- /// Disables cache reprocessing change detection. - /// The cancellation token to use. - /// or is null. - public Response CreateOrUpdate(string indexerName, SearchIndexer indexer, string ifMatch = null, string ifNoneMatch = null, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - if (indexer == null) - { - throw new ArgumentNullException(nameof(indexer)); - } - - using var message = CreateCreateOrUpdateRequest(indexerName, indexer, ifMatch, ifNoneMatch, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndexer value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexer.DeserializeSearchIndexer(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateDeleteRequest(string indexerName, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Delete; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers('", false); - uri.AppendPath(indexerName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Deletes an indexer. - /// The name of the indexer to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public async Task DeleteAsync(string indexerName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateDeleteRequest(indexerName, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Deletes an indexer. - /// The name of the indexer to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. 
- public Response Delete(string indexerName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateDeleteRequest(indexerName, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetRequest(string indexerName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers('", false); - uri.AppendPath(indexerName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Retrieves an indexer definition. - /// The name of the indexer to retrieve. - /// The cancellation token to use. - /// is null. - public async Task> GetAsync(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateGetRequest(indexerName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchIndexer value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexer.DeserializeSearchIndexer(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Retrieves an indexer definition. - /// The name of the indexer to retrieve. - /// The cancellation token to use. - /// is null. - public Response Get(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateGetRequest(indexerName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchIndexer value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexer.DeserializeSearchIndexer(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateListRequest(string select) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers", false); - if (select != null) - { - uri.AppendQuery("$select", select, true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Lists all indexers available for a search service. - /// Selects which top-level properties of the indexers to retrieve. 
Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public async Task> ListAsync(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - ListIndexersResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = ListIndexersResult.DeserializeListIndexersResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Lists all indexers available for a search service. - /// Selects which top-level properties of the indexers to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public Response List(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - ListIndexersResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = ListIndexersResult.DeserializeListIndexersResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateCreateRequest(SearchIndexer indexer) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(indexer); - request.Content = content; - return message; - } - - /// Creates a new indexer. - /// The definition of the indexer to create. - /// The cancellation token to use. - /// is null. - public async Task> CreateAsync(SearchIndexer indexer, CancellationToken cancellationToken = default) - { - if (indexer == null) - { - throw new ArgumentNullException(nameof(indexer)); - } - - using var message = CreateCreateRequest(indexer); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 201: - { - SearchIndexer value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexer.DeserializeSearchIndexer(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new indexer. - /// The definition of the indexer to create. - /// The cancellation token to use. - /// is null. 
- public Response Create(SearchIndexer indexer, CancellationToken cancellationToken = default) - { - if (indexer == null) - { - throw new ArgumentNullException(nameof(indexer)); - } - - using var message = CreateCreateRequest(indexer); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 201: - { - SearchIndexer value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexer.DeserializeSearchIndexer(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetStatusRequest(string indexerName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexers('", false); - uri.AppendPath(indexerName, true); - uri.AppendPath("')/search.status", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Returns the current status and execution history of an indexer. - /// The name of the indexer for which to retrieve status. - /// The cancellation token to use. - /// is null. - public async Task> GetStatusAsync(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateGetStatusRequest(indexerName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchIndexerStatus value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexerStatus.DeserializeSearchIndexerStatus(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Returns the current status and execution history of an indexer. - /// The name of the indexer for which to retrieve status. - /// The cancellation token to use. - /// is null. - public Response GetStatus(string indexerName, CancellationToken cancellationToken = default) - { - if (indexerName == null) - { - throw new ArgumentNullException(nameof(indexerName)); - } - - using var message = CreateGetStatusRequest(indexerName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchIndexerStatus value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexerStatus.DeserializeSearchIndexerStatus(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Indexes.cs b/sdk/search/Azure.Search.Documents/src/Generated/Indexes.cs new file mode 100644 index 000000000000..7e30ec0121b2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Indexes.cs @@ -0,0 +1,987 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Autorest.CSharp.Core; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + // Data plane generated sub-client. + /// The Indexes sub-client. + internal partial class Indexes + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of Indexes for mocking. + protected Indexes() + { + } + + /// Initializes a new instance of Indexes. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// Service host. + /// The API version to use for this operation. + internal Indexes(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _apiVersion = apiVersion; + } + + /// Creates a new search index. + /// The definition of the index to create. + /// The cancellation token to use. + /// is null. + /// + public virtual async Task> CreateAsync(SearchIndex index, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(index, nameof(index)); + + using RequestContent content = index.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(content, context).ConfigureAwait(false); + return Response.FromValue(SearchIndex.FromResponse(response), response); + } + + /// Creates a new search index. + /// The definition of the index to create. + /// The cancellation token to use. + /// is null. + /// + public virtual Response Create(SearchIndex index, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(index, nameof(index)); + + using RequestContent content = index.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(content, context); + return Response.FromValue(SearchIndex.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new search index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. 
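// --- Editor-added illustrative sketch (not generated code) ---------------------------------
// The strongly typed Create overload above accepts the public SearchIndex model; the field
// helpers (SimpleField, SearchableField) come from Azure.Search.Documents.Indexes.Models and
// the index/field names are placeholders. "indexes" is a hypothetical instance of this
// internal sub-client (public callers would normally go through SearchIndexClient).
internal static async Task<SearchIndex> CreateSampleIndexAsync(Indexes indexes)
{
    var definition = new SearchIndex("hotels-index")
    {
        Fields =
        {
            new SimpleField("hotelId", SearchFieldDataType.String) { IsKey = true },
            new SearchableField("description"),
        }
    };
    Response<SearchIndex> created = await indexes.CreateAsync(definition);
    return created.Value;
}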
+ /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new search index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new search index or updates an index if it already exists. + /// The name of the index. + /// The definition of the index to create or update. + /// + /// Allows new analyzers, tokenizers, token filters, or char filters to be added to + /// an index by taking the index offline for at least a few seconds. This + /// temporarily causes indexing and query requests to fail. Performance and write + /// availability of the index can be impaired for several minutes after the index + /// is updated, or longer for very large indexes. + /// + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> CreateOrUpdateAsync(string indexName, SearchIndex index, bool? allowIndexDowntime = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(index, nameof(index)); + + using RequestContent content = index.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateOrUpdateAsync(indexName, content, allowIndexDowntime, matchConditions, context).ConfigureAwait(false); + return Response.FromValue(SearchIndex.FromResponse(response), response); + } + + /// Creates a new search index or updates an index if it already exists. + /// The name of the index. + /// The definition of the index to create or update. + /// + /// Allows new analyzers, tokenizers, token filters, or char filters to be added to + /// an index by taking the index offline for at least a few seconds. This + /// temporarily causes indexing and query requests to fail. Performance and write + /// availability of the index can be impaired for several minutes after the index + /// is updated, or longer for very large indexes. 
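// --- Editor-added illustrative sketch (not generated code) ---------------------------------
// As the remarks above note, adding analyzers (or tokenizers, token filters, char filters)
// requires allowIndexDowntime: true, which briefly takes the index offline. CustomAnalyzer and
// LexicalTokenizerName are recalled from the public model surface; treat this as a sketch.
internal static async Task<SearchIndex> AddAnalyzerAsync(Indexes indexes, SearchIndex existing)
{
    existing.Analyzers.Add(new CustomAnalyzer("sample-analyzer", LexicalTokenizerName.Standard));
    Response<SearchIndex> updated = await indexes.CreateOrUpdateAsync(
        existing.Name, existing, allowIndexDowntime: true);
    return updated.Value;
}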
+ /// + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response CreateOrUpdate(string indexName, SearchIndex index, bool? allowIndexDowntime = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(index, nameof(index)); + + using RequestContent content = index.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateOrUpdate(indexName, content, allowIndexDowntime, matchConditions, context); + return Response.FromValue(SearchIndex.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new search index or updates an index if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// + /// Allows new analyzers, tokenizers, token filters, or char filters to be added to + /// an index by taking the index offline for at least a few seconds. This + /// temporarily causes indexing and query requests to fail. Performance and write + /// availability of the index can be impaired for several minutes after the index + /// is updated, or longer for very large indexes. + /// + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateOrUpdateAsync(string indexName, RequestContent content, bool? allowIndexDowntime = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(indexName, content, allowIndexDowntime, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new search index or updates an index if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// + /// Allows new analyzers, tokenizers, token filters, or char filters to be added to + /// an index by taking the index offline for at least a few seconds. This + /// temporarily causes indexing and query requests to fail. 
Performance and write + /// availability of the index can be impaired for several minutes after the index + /// is updated, or longer for very large indexes. + /// + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response CreateOrUpdate(string indexName, RequestContent content, bool? allowIndexDowntime = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(indexName, content, allowIndexDowntime, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a search index and all the documents it contains. This operation is + /// permanent, with no recovery option. Make sure you have a master copy of your + /// index definition, data ingestion code, and a backup of the primary data source + /// in case you need to re-build the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteAsync(string indexName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(indexName, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a search index and all the documents it contains. This operation is + /// permanent, with no recovery option. Make sure you have a master copy of your + /// index definition, data ingestion code, and a backup of the primary data source + /// in case you need to re-build the index. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. 
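// --- Editor-added illustrative sketch (not generated code) ---------------------------------
// Because DeleteAsync above forwards MatchConditions as If-Match / If-None-Match headers, a
// delete can be made conditional on the ETag captured from an earlier GetIndex call, avoiding
// destruction of an index someone else has just updated. "indexes" is a hypothetical instance
// of this internal sub-client, and the ETag property on the public SearchIndex model is assumed.
internal static async Task<Response> DeleteIfUnchangedAsync(Indexes indexes, SearchIndex current)
{
    var conditions = new MatchConditions { IfMatch = current.ETag }; // only delete if unchanged
    return await indexes.DeleteAsync(current.Name, conditions);
}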
+ /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(string indexName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(indexName, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves an index definition. + /// The name of the index. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetIndexAsync(string indexName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetIndexAsync(indexName, context).ConfigureAwait(false); + return Response.FromValue(SearchIndex.FromResponse(response), response); + } + + /// Retrieves an index definition. + /// The name of the index. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetIndex(string indexName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetIndex(indexName, context); + return Response.FromValue(SearchIndex.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieves an index definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetIndexAsync(string indexName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.GetIndex"); + scope.Start(); + try + { + using HttpMessage message = CreateGetIndexRequest(indexName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieves an index definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. 
+ /// The response returned from the service. + /// + public virtual Response GetIndex(string indexName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.GetIndex"); + scope.Start(); + try + { + using HttpMessage message = CreateGetIndexRequest(indexName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// Returns statistics for the given index, including a document count and storage + /// usage. + /// + /// The name of the index. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetStatisticsAsync(string indexName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetStatisticsAsync(indexName, context).ConfigureAwait(false); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexStatistics.FromResponse(response), response); + } + + /// + /// Returns statistics for the given index, including a document count and storage + /// usage. + /// + /// The name of the index. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetStatistics(string indexName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetStatistics(indexName, context); + return Response.FromValue(Search.Documents.Indexes.Models.SearchIndexStatistics.FromResponse(response), response); + } + + /// + /// [Protocol Method] Returns statistics for the given index, including a document count and storage + /// usage. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetStatisticsAsync(string indexName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.GetStatistics"); + scope.Start(); + try + { + using HttpMessage message = CreateGetStatisticsRequest(indexName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Returns statistics for the given index, including a document count and storage + /// usage. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetStatistics(string indexName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.GetStatistics"); + scope.Start(); + try + { + using HttpMessage message = CreateGetStatisticsRequest(indexName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Shows how an analyzer breaks text into tokens. + /// The name of the index. + /// The text and analyzer or analysis components to test. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> AnalyzeAsync(string indexName, Search.Documents.Indexes.Models.AnalyzeTextOptions request, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(request, nameof(request)); + + using RequestContent content = request.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AnalyzeAsync(indexName, content, context).ConfigureAwait(false); + return Response.FromValue(AnalyzeResult.FromResponse(response), response); + } + + /// Shows how an analyzer breaks text into tokens. + /// The name of the index. + /// The text and analyzer or analysis components to test. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response Analyze(string indexName, Search.Documents.Indexes.Models.AnalyzeTextOptions request, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(request, nameof(request)); + + using RequestContent content = request.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Analyze(indexName, content, context); + return Response.FromValue(AnalyzeResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Shows how an analyzer breaks text into tokens. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task AnalyzeAsync(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.Analyze"); + scope.Start(); + try + { + using HttpMessage message = CreateAnalyzeRequest(indexName, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Shows how an analyzer breaks text into tokens. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the index. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Analyze(string indexName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(indexName, nameof(indexName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Indexes.Analyze"); + scope.Start(); + try + { + using HttpMessage message = CreateAnalyzeRequest(indexName, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Lists all indexes available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual AsyncPageable GetIndexesAsync(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetIndexesRequest(select, context); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, null, e => SearchIndex.DeserializeSearchIndex(e), ClientDiagnostics, _pipeline, "Indexes.GetIndexes", "value", null, context); + } + + /// Lists all indexes available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual Pageable GetIndexes(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; + HttpMessage FirstPageRequest(int? 
pageSizeHint) => CreateGetIndexesRequest(select, context); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, null, e => SearchIndex.DeserializeSearchIndex(e), ClientDiagnostics, _pipeline, "Indexes.GetIndexes", "value", null, context); + } + + /// + /// [Protocol Method] Lists all indexes available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. + /// + public virtual AsyncPageable GetIndexesAsync(string select, RequestContext context) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetIndexesRequest(select, context); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, null, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "Indexes.GetIndexes", "value", null, context); + } + + /// + /// [Protocol Method] Lists all indexes available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. + /// + public virtual Pageable GetIndexes(string select, RequestContext context) + { + HttpMessage FirstPageRequest(int? 
pageSizeHint) => CreateGetIndexesRequest(select, context); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, null, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "Indexes.GetIndexes", "value", null, context); + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier201); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetIndexesRequest(string select, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (select != null) + { + uri.AppendQuery("$select", select, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateOrUpdateRequest(string indexName, RequestContent content, bool? allowIndexDowntime, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200201); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (allowIndexDowntime != null) + { + uri.AppendQuery("allowIndexDowntime", allowIndexDowntime.Value, true); + } + request.Uri = uri; + request.Headers.Add("Prefer", "return=representation"); + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(string indexName, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204404); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + return message; + } + + internal HttpMessage CreateGetIndexRequest(string indexName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + 
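+ // The regenerated request builders send a plain "Accept: application/json" header
+ // (the removed IndexesRestClient further down in this diff asked for
+ // "application/json; odata.metadata=minimal"), and they rely on the shared
+ // ResponseClassifier200 / ResponseClassifier200201 / ResponseClassifier204404
+ // fields declared at the end of this class to decide which status codes count as success.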
request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetStatisticsRequest(string indexName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/search.stats", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAnalyzeRequest(string indexName, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexes('", false); + uri.AppendPath(indexName, true); + uri.AppendPath("')/search.analyze", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier201; + private static ResponseClassifier ResponseClassifier201 => _responseClassifier201 ??= new StatusCodeClassifier(stackalloc ushort[] { 201 }); + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier200201; + private static ResponseClassifier ResponseClassifier200201 => _responseClassifier200201 ??= new StatusCodeClassifier(stackalloc ushort[] { 200, 201 }); + private static ResponseClassifier _responseClassifier204404; + private static ResponseClassifier ResponseClassifier204404 => _responseClassifier204404 ??= new StatusCodeClassifier(stackalloc ushort[] { 204, 404 }); + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexesRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexesRestClient.cs deleted file mode 100644 index 0f0bbd40640f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/IndexesRestClient.cs +++ /dev/null @@ -1,579 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents -{ - internal partial class IndexesRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. 
- internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of IndexesRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , or is null. - public IndexesRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateCreateRequest(SearchIndex index) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexes", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(index); - request.Content = content; - return message; - } - - /// Creates a new search index. - /// The definition of the index to create. - /// The cancellation token to use. - /// is null. - public async Task> CreateAsync(SearchIndex index, CancellationToken cancellationToken = default) - { - if (index == null) - { - throw new ArgumentNullException(nameof(index)); - } - - using var message = CreateCreateRequest(index); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 201: - { - SearchIndex value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndex.DeserializeSearchIndex(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new search index. - /// The definition of the index to create. - /// The cancellation token to use. - /// is null. 
- public Response Create(SearchIndex index, CancellationToken cancellationToken = default) - { - if (index == null) - { - throw new ArgumentNullException(nameof(index)); - } - - using var message = CreateCreateRequest(index); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 201: - { - SearchIndex value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndex.DeserializeSearchIndex(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateListRequest(string select) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexes", false); - if (select != null) - { - uri.AppendQuery("$select", select, true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Lists all indexes available for a search service. - /// Selects which top-level properties of the index definitions to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public async Task> ListAsync(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - ListIndexesResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = ListIndexesResult.DeserializeListIndexesResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Lists all indexes available for a search service. - /// Selects which top-level properties of the index definitions to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public Response List(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - ListIndexesResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = ListIndexesResult.DeserializeListIndexesResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateCreateOrUpdateRequest(string indexName, SearchIndex index, bool? 
allowIndexDowntime, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Put; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexes('", false); - uri.AppendPath(indexName, true); - uri.AppendPath("')", false); - if (allowIndexDowntime != null) - { - uri.AppendQuery("allowIndexDowntime", allowIndexDowntime.Value, true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Prefer", "return=representation"); - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(index); - request.Content = content; - return message; - } - - /// Creates a new search index or updates an index if it already exists. - /// The definition of the index to create or update. - /// The definition of the index to create or update. - /// Allows new analyzers, tokenizers, token filters, or char filters to be added to an index by taking the index offline for at least a few seconds. This temporarily causes indexing and query requests to fail. Performance and write availability of the index can be impaired for several minutes after the index is updated, or longer for very large indexes. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// or is null. - public async Task> CreateOrUpdateAsync(string indexName, SearchIndex index, bool? allowIndexDowntime = null, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - if (index == null) - { - throw new ArgumentNullException(nameof(index)); - } - - using var message = CreateCreateOrUpdateRequest(indexName, index, allowIndexDowntime, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndex value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndex.DeserializeSearchIndex(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new search index or updates an index if it already exists. - /// The definition of the index to create or update. - /// The definition of the index to create or update. - /// Allows new analyzers, tokenizers, token filters, or char filters to be added to an index by taking the index offline for at least a few seconds. This temporarily causes indexing and query requests to fail. Performance and write availability of the index can be impaired for several minutes after the index is updated, or longer for very large indexes. - /// Defines the If-Match condition. 
The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// or is null. - public Response CreateOrUpdate(string indexName, SearchIndex index, bool? allowIndexDowntime = null, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - if (index == null) - { - throw new ArgumentNullException(nameof(index)); - } - - using var message = CreateCreateOrUpdateRequest(indexName, index, allowIndexDowntime, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndex value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndex.DeserializeSearchIndex(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateDeleteRequest(string indexName, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Delete; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexes('", false); - uri.AppendPath(indexName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Deletes a search index and all the documents it contains. This operation is permanent, with no recovery option. Make sure you have a master copy of your index definition, data ingestion code, and a backup of the primary data source in case you need to re-build the index. - /// The name of the index to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public async Task DeleteAsync(string indexName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - - using var message = CreateDeleteRequest(indexName, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Deletes a search index and all the documents it contains. This operation is permanent, with no recovery option. Make sure you have a master copy of your index definition, data ingestion code, and a backup of the primary data source in case you need to re-build the index. - /// The name of the index to delete. - /// Defines the If-Match condition. 
The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public Response Delete(string indexName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - - using var message = CreateDeleteRequest(indexName, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetRequest(string indexName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexes('", false); - uri.AppendPath(indexName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Retrieves an index definition. - /// The name of the index to retrieve. - /// The cancellation token to use. - /// is null. - public async Task> GetAsync(string indexName, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - - using var message = CreateGetRequest(indexName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchIndex value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndex.DeserializeSearchIndex(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Retrieves an index definition. - /// The name of the index to retrieve. - /// The cancellation token to use. - /// is null. 
- public Response Get(string indexName, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - - using var message = CreateGetRequest(indexName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchIndex value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndex.DeserializeSearchIndex(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetStatisticsRequest(string indexName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexes('", false); - uri.AppendPath(indexName, true); - uri.AppendPath("')/search.stats", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Returns statistics for the given index, including a document count and storage usage. - /// The name of the index for which to retrieve statistics. - /// The cancellation token to use. - /// is null. - public async Task> GetStatisticsAsync(string indexName, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - - using var message = CreateGetStatisticsRequest(indexName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchIndexStatistics value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexStatistics.DeserializeSearchIndexStatistics(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Returns statistics for the given index, including a document count and storage usage. - /// The name of the index for which to retrieve statistics. - /// The cancellation token to use. - /// is null. 
- public Response GetStatistics(string indexName, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - - using var message = CreateGetStatisticsRequest(indexName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchIndexStatistics value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexStatistics.DeserializeSearchIndexStatistics(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateAnalyzeRequest(string indexName, AnalyzeTextOptions request) - { - var message = _pipeline.CreateMessage(); - var request0 = message.Request; - request0.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/indexes('", false); - uri.AppendPath(indexName, true); - uri.AppendPath("')/search.analyze", false); - uri.AppendQuery("api-version", _apiVersion, true); - request0.Uri = uri; - request0.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request0.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(request); - request0.Content = content; - return message; - } - - /// Shows how an analyzer breaks text into tokens. - /// The name of the index for which to test an analyzer. - /// The text and analyzer or analysis components to test. - /// The cancellation token to use. - /// or is null. - public async Task> AnalyzeAsync(string indexName, AnalyzeTextOptions request, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - if (request == null) - { - throw new ArgumentNullException(nameof(request)); - } - - using var message = CreateAnalyzeRequest(indexName, request); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - AnalyzeResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = AnalyzeResult.DeserializeAnalyzeResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Shows how an analyzer breaks text into tokens. - /// The name of the index for which to test an analyzer. - /// The text and analyzer or analysis components to test. - /// The cancellation token to use. - /// or is null. 
- public Response Analyze(string indexName, AnalyzeTextOptions request, CancellationToken cancellationToken = default) - { - if (indexName == null) - { - throw new ArgumentNullException(nameof(indexName)); - } - if (request == null) - { - throw new ArgumentNullException(nameof(request)); - } - - using var message = CreateAnalyzeRequest(indexName, request); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - AnalyzeResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = AnalyzeResult.DeserializeAnalyzeResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingMode.cs new file mode 100644 index 000000000000..e9da2856f022 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingMode.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Represents the mode the indexer is executing in. + public readonly partial struct IndexingMode : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public IndexingMode(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string IndexingAllDocsValue = "indexingAllDocs"; + private const string IndexingResetDocsValue = "indexingResetDocs"; + + /// The indexer is indexing all documents in the datasource. + public static IndexingMode IndexingAllDocs { get; } = new IndexingMode(IndexingAllDocsValue); + /// + /// The indexer is indexing selective, reset documents in the datasource. The + /// documents being indexed are defined on indexer status. + /// + public static IndexingMode IndexingResetDocs { get; } = new IndexingMode(IndexingResetDocsValue); + /// Determines if two values are the same. + public static bool operator ==(IndexingMode left, IndexingMode right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(IndexingMode left, IndexingMode right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator IndexingMode(string value) => new IndexingMode(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is IndexingMode other && Equals(other); + /// + public bool Equals(IndexingMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParameters.Serialization.cs new file mode 100644 index 000000000000..83a8f331e375 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParameters.Serialization.cs @@ -0,0 +1,194 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class IndexingParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(BatchSize)) + { + writer.WritePropertyName("batchSize"u8); + writer.WriteNumberValue(BatchSize.Value); + } + if (Optional.IsDefined(MaxFailedItems)) + { + writer.WritePropertyName("maxFailedItems"u8); + writer.WriteNumberValue(MaxFailedItems.Value); + } + if (Optional.IsDefined(MaxFailedItemsPerBatch)) + { + writer.WritePropertyName("maxFailedItemsPerBatch"u8); + writer.WriteNumberValue(MaxFailedItemsPerBatch.Value); + } + if (Optional.IsDefined(Configuration)) + { + writer.WritePropertyName("configuration"u8); + writer.WriteObjectValue(Configuration, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + IndexingParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexingParameters(document.RootElement, options); + } + + internal static IndexingParameters DeserializeIndexingParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? batchSize = default; + int? maxFailedItems = default; + int? 
maxFailedItemsPerBatch = default; + IndexingParametersConfiguration configuration = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("batchSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + batchSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxFailedItems"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxFailedItems = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxFailedItemsPerBatch"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxFailedItemsPerBatch = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("configuration"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + configuration = IndexingParametersConfiguration.DeserializeIndexingParametersConfiguration(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IndexingParameters(batchSize, maxFailedItems, maxFailedItemsPerBatch, configuration, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexingParameters)} does not support writing '{options.Format}' format."); + } + } + + IndexingParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexingParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexingParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParameters.cs new file mode 100644 index 000000000000..31297c51ff9e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParameters.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents parameters for indexer execution. + public partial class IndexingParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public IndexingParameters() + { + } + + /// Initializes a new instance of . + /// + /// The number of items that are read from the data source and indexed as a single + /// batch in order to improve performance. The default depends on the data source + /// type. + /// + /// + /// The maximum number of items that can fail indexing for indexer execution to + /// still be considered successful. -1 means no limit. Default is 0. + /// + /// + /// The maximum number of items in a single batch that can fail indexing for the + /// batch to still be considered successful. -1 means no limit. Default is 0. + /// + /// + /// A dictionary of indexer-specific configuration properties. Each name is the + /// name of a specific property. Each value must be of a primitive type. + /// + /// Keeps track of any properties unknown to the library. + internal IndexingParameters(int? batchSize, int? maxFailedItems, int? maxFailedItemsPerBatch, IndexingParametersConfiguration configuration, IDictionary serializedAdditionalRawData) + { + BatchSize = batchSize; + MaxFailedItems = maxFailedItems; + MaxFailedItemsPerBatch = maxFailedItemsPerBatch; + Configuration = configuration; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The number of items that are read from the data source and indexed as a single + /// batch in order to improve performance. The default depends on the data source + /// type. + /// + public int? BatchSize { get; set; } + /// + /// The maximum number of items that can fail indexing for indexer execution to + /// still be considered successful. -1 means no limit. Default is 0. + /// + public int? MaxFailedItems { get; set; } + /// + /// The maximum number of items in a single batch that can fail indexing for the + /// batch to still be considered successful. -1 means no limit. Default is 0. + /// + public int? MaxFailedItemsPerBatch { get; set; } + /// + /// A dictionary of indexer-specific configuration properties. Each name is the + /// name of a specific property. Each value must be of a primitive type. 
+ /// + public IndexingParametersConfiguration Configuration { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingParametersConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParametersConfiguration.Serialization.cs new file mode 100644 index 000000000000..c32495c70d3d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParametersConfiguration.Serialization.cs @@ -0,0 +1,394 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + public partial class IndexingParametersConfiguration : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingParametersConfiguration)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(ParsingMode)) + { + writer.WritePropertyName("parsingMode"u8); + writer.WriteStringValue(ParsingMode.Value.ToString()); + } + if (Optional.IsDefined(ExcludedFileNameExtensions)) + { + writer.WritePropertyName("excludedFileNameExtensions"u8); + writer.WriteStringValue(ExcludedFileNameExtensions); + } + if (Optional.IsDefined(IndexedFileNameExtensions)) + { + writer.WritePropertyName("indexedFileNameExtensions"u8); + writer.WriteStringValue(IndexedFileNameExtensions); + } + if (Optional.IsDefined(FailOnUnsupportedContentType)) + { + writer.WritePropertyName("failOnUnsupportedContentType"u8); + writer.WriteBooleanValue(FailOnUnsupportedContentType.Value); + } + if (Optional.IsDefined(FailOnUnprocessableDocument)) + { + writer.WritePropertyName("failOnUnprocessableDocument"u8); + writer.WriteBooleanValue(FailOnUnprocessableDocument.Value); + } + if (Optional.IsDefined(IndexStorageMetadataOnlyForOversizedDocuments)) + { + writer.WritePropertyName("indexStorageMetadataOnlyForOversizedDocuments"u8); + writer.WriteBooleanValue(IndexStorageMetadataOnlyForOversizedDocuments.Value); + } + if (Optional.IsDefined(DelimitedTextHeaders)) + { + writer.WritePropertyName("delimitedTextHeaders"u8); + writer.WriteStringValue(DelimitedTextHeaders); + } + if (Optional.IsDefined(DelimitedTextDelimiter)) + { + writer.WritePropertyName("delimitedTextDelimiter"u8); + writer.WriteStringValue(DelimitedTextDelimiter); + } + if (Optional.IsDefined(FirstLineContainsHeaders)) + { + writer.WritePropertyName("firstLineContainsHeaders"u8); + writer.WriteBooleanValue(FirstLineContainsHeaders.Value); + } + if (Optional.IsDefined(MarkdownParsingSubmode)) + { + writer.WritePropertyName("markdownParsingSubmode"u8); + writer.WriteStringValue(MarkdownParsingSubmode.Value.ToString()); + } + if (Optional.IsDefined(MarkdownHeaderDepth)) + { 
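+ // MarkdownHeaderDepth appears to follow the same extensible-enum pattern as IndexingMode
+ // earlier in this diff: it wraps a string value, so it is written with WriteStringValue here
+ // and read back in DeserializeIndexingParametersConfiguration below via
+ // new MarkdownHeaderDepth(property.Value.GetString()).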
+ writer.WritePropertyName("markdownHeaderDepth"u8); + writer.WriteStringValue(MarkdownHeaderDepth.Value.ToString()); + } + if (Optional.IsDefined(DocumentRoot)) + { + writer.WritePropertyName("documentRoot"u8); + writer.WriteStringValue(DocumentRoot); + } + if (Optional.IsDefined(DataToExtract)) + { + writer.WritePropertyName("dataToExtract"u8); + writer.WriteStringValue(DataToExtract.Value.ToString()); + } + if (Optional.IsDefined(ImageAction)) + { + writer.WritePropertyName("imageAction"u8); + writer.WriteStringValue(ImageAction.Value.ToString()); + } + if (Optional.IsDefined(AllowSkillsetToReadFileData)) + { + writer.WritePropertyName("allowSkillsetToReadFileData"u8); + writer.WriteBooleanValue(AllowSkillsetToReadFileData.Value); + } + if (Optional.IsDefined(PdfTextRotationAlgorithm)) + { + writer.WritePropertyName("pdfTextRotationAlgorithm"u8); + writer.WriteStringValue(PdfTextRotationAlgorithm.Value.ToString()); + } + if (Optional.IsDefined(ExecutionEnvironment)) + { + writer.WritePropertyName("executionEnvironment"u8); + writer.WriteStringValue(ExecutionEnvironment.Value.ToString()); + } + if (Optional.IsDefined(QueryTimeout)) + { + writer.WritePropertyName("queryTimeout"u8); + writer.WriteStringValue(QueryTimeout); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + IndexingParametersConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingParametersConfiguration)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexingParametersConfiguration(document.RootElement, options); + } + + internal static IndexingParametersConfiguration DeserializeIndexingParametersConfiguration(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + BlobIndexerParsingMode? parsingMode = default; + string excludedFileNameExtensions = default; + string indexedFileNameExtensions = default; + bool? failOnUnsupportedContentType = default; + bool? failOnUnprocessableDocument = default; + bool? indexStorageMetadataOnlyForOversizedDocuments = default; + string delimitedTextHeaders = default; + string delimitedTextDelimiter = default; + bool? firstLineContainsHeaders = default; + MarkdownParsingSubmode? markdownParsingSubmode = default; + MarkdownHeaderDepth? markdownHeaderDepth = default; + string documentRoot = default; + BlobIndexerDataToExtract? dataToExtract = default; + BlobIndexerImageAction? imageAction = default; + bool? allowSkillsetToReadFileData = default; + Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm? pdfTextRotationAlgorithm = default; + IndexerExecutionEnvironment? 
executionEnvironment = default; + string queryTimeout = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("parsingMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + parsingMode = new BlobIndexerParsingMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("excludedFileNameExtensions"u8)) + { + excludedFileNameExtensions = property.Value.GetString(); + continue; + } + if (property.NameEquals("indexedFileNameExtensions"u8)) + { + indexedFileNameExtensions = property.Value.GetString(); + continue; + } + if (property.NameEquals("failOnUnsupportedContentType"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + failOnUnsupportedContentType = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("failOnUnprocessableDocument"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + failOnUnprocessableDocument = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("indexStorageMetadataOnlyForOversizedDocuments"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + indexStorageMetadataOnlyForOversizedDocuments = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("delimitedTextHeaders"u8)) + { + delimitedTextHeaders = property.Value.GetString(); + continue; + } + if (property.NameEquals("delimitedTextDelimiter"u8)) + { + delimitedTextDelimiter = property.Value.GetString(); + continue; + } + if (property.NameEquals("firstLineContainsHeaders"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + firstLineContainsHeaders = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("markdownParsingSubmode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + markdownParsingSubmode = new MarkdownParsingSubmode(property.Value.GetString()); + continue; + } + if (property.NameEquals("markdownHeaderDepth"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + markdownHeaderDepth = new MarkdownHeaderDepth(property.Value.GetString()); + continue; + } + if (property.NameEquals("documentRoot"u8)) + { + documentRoot = property.Value.GetString(); + continue; + } + if (property.NameEquals("dataToExtract"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + dataToExtract = new BlobIndexerDataToExtract(property.Value.GetString()); + continue; + } + if (property.NameEquals("imageAction"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + imageAction = new BlobIndexerImageAction(property.Value.GetString()); + continue; + } + if (property.NameEquals("allowSkillsetToReadFileData"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + allowSkillsetToReadFileData = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("pdfTextRotationAlgorithm"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + pdfTextRotationAlgorithm = new Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm(property.Value.GetString()); + continue; + } + if (property.NameEquals("executionEnvironment"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + executionEnvironment = new 
IndexerExecutionEnvironment(property.Value.GetString()); + continue; + } + if (property.NameEquals("queryTimeout"u8)) + { + queryTimeout = property.Value.GetString(); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new IndexingParametersConfiguration( + parsingMode, + excludedFileNameExtensions, + indexedFileNameExtensions, + failOnUnsupportedContentType, + failOnUnprocessableDocument, + indexStorageMetadataOnlyForOversizedDocuments, + delimitedTextHeaders, + delimitedTextDelimiter, + firstLineContainsHeaders, + markdownParsingSubmode, + markdownHeaderDepth, + documentRoot, + dataToExtract, + imageAction, + allowSkillsetToReadFileData, + pdfTextRotationAlgorithm, + executionEnvironment, + queryTimeout, + additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexingParametersConfiguration)} does not support writing '{options.Format}' format."); + } + } + + IndexingParametersConfiguration IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingParametersConfiguration(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexingParametersConfiguration)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexingParametersConfiguration FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingParametersConfiguration(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingParametersConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParametersConfiguration.cs new file mode 100644 index 000000000000..b7ae1a65f1d0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingParametersConfiguration.cs @@ -0,0 +1,251 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// A dictionary of indexer-specific configuration properties. Each name is the + /// name of a specific property. Each value must be of a primitive type. + /// + public partial class IndexingParametersConfiguration + { + /// Initializes a new instance of . 
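A minimal sketch of how this configuration type might be populated for a delimited-text blob source, using only the public parameterless constructor and properties declared in this file; wiring it onto an indexer's IndexingParameters is assumed rather than shown by this diff.

using Azure.Search.Documents;
using Azure.Search.Documents.Indexes.Models;

// Hypothetical helper: builds a configuration for CSV blobs from the settable
// properties declared in IndexingParametersConfiguration.
static IndexingParametersConfiguration CreateCsvConfiguration()
{
    return new IndexingParametersConfiguration
    {
        ParsingMode = BlobIndexerParsingMode.DelimitedText, // treat each line as a document
        FirstLineContainsHeaders = true,                    // first non-blank line holds headers
        DelimitedTextDelimiter = "|",                       // single-character end-of-line delimiter
        ExcludedFileNameExtensions = ".png,.mp4",           // skip media blobs during indexing
        FailOnUnprocessableDocument = false                 // keep indexing past bad documents
    };
}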
+ public IndexingParametersConfiguration() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Represents the parsing mode for indexing from an Azure blob data source. + /// + /// Comma-delimited list of filename extensions to ignore when processing from + /// Azure blob storage. For example, you could exclude ".png, .mp4" to skip over + /// those files during indexing. + /// + /// + /// Comma-delimited list of filename extensions to select when processing from + /// Azure blob storage. For example, you could focus indexing on specific + /// application files ".docx, .pptx, .msg" to specifically include those file + /// types. + /// + /// + /// For Azure blobs, set to false if you want to continue indexing when an + /// unsupported content type is encountered, and you don't know all the content + /// types (file extensions) in advance. + /// + /// + /// For Azure blobs, set to false if you want to continue indexing if a document + /// fails indexing. + /// + /// + /// For Azure blobs, set this property to true to still index storage metadata for + /// blob content that is too large to process. Oversized blobs are treated as + /// errors by default. For limits on blob size, see + /// https://learn.microsoft.com/azure/search/search-limits-quotas-capacity. + /// + /// + /// For CSV blobs, specifies a comma-delimited list of column headers, useful for + /// mapping source fields to destination fields in an index. + /// + /// + /// For CSV blobs, specifies the end-of-line single-character delimiter for CSV + /// files where each line starts a new document (for example, "|"). + /// + /// + /// For CSV blobs, indicates that the first (non-blank) line of each blob contains + /// headers. + /// + /// + /// Specifies the submode that will determine whether a markdown file will be + /// parsed into exactly one search document or multiple search documents. Default + /// is `oneToMany`. + /// + /// + /// Specifies the max header depth that will be considered while grouping markdown + /// content. Default is `h6`. + /// + /// + /// For JSON arrays, given a structured or semi-structured document, you can + /// specify a path to the array using this property. + /// + /// + /// Specifies the data to extract from Azure blob storage and tells the indexer + /// which data to extract from image content when "imageAction" is set to a value + /// other than "none". This applies to embedded image content in a .PDF or other + /// application, or image files such as .jpg and .png, in Azure blobs. + /// + /// + /// Determines how to process embedded images and image files in Azure blob + /// storage. Setting the "imageAction" configuration to any value other than + /// "none" requires that a skillset also be attached to that indexer. + /// + /// + /// If true, will create a path //document//file_data that is an object + /// representing the original file data downloaded from your blob data source. + /// This allows you to pass the original file data to a custom skill for processing + /// within the enrichment pipeline, or to the Document Extraction skill. + /// + /// Determines algorithm for text extraction from PDF files in Azure blob storage. + /// Specifies the environment in which the indexer should execute. + /// + /// Increases the timeout beyond the 5-minute default for Azure SQL database data + /// sources, specified in the format "hh:mm:ss". + /// + /// Additional Properties. + internal IndexingParametersConfiguration(BlobIndexerParsingMode? 
parsingMode, string excludedFileNameExtensions, string indexedFileNameExtensions, bool? failOnUnsupportedContentType, bool? failOnUnprocessableDocument, bool? indexStorageMetadataOnlyForOversizedDocuments, string delimitedTextHeaders, string delimitedTextDelimiter, bool? firstLineContainsHeaders, MarkdownParsingSubmode? markdownParsingSubmode, MarkdownHeaderDepth? markdownHeaderDepth, string documentRoot, BlobIndexerDataToExtract? dataToExtract, BlobIndexerImageAction? imageAction, bool? allowSkillsetToReadFileData, Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm? pdfTextRotationAlgorithm, IndexerExecutionEnvironment? executionEnvironment, string queryTimeout, IDictionary additionalProperties) + { + ParsingMode = parsingMode; + ExcludedFileNameExtensions = excludedFileNameExtensions; + IndexedFileNameExtensions = indexedFileNameExtensions; + FailOnUnsupportedContentType = failOnUnsupportedContentType; + FailOnUnprocessableDocument = failOnUnprocessableDocument; + IndexStorageMetadataOnlyForOversizedDocuments = indexStorageMetadataOnlyForOversizedDocuments; + DelimitedTextHeaders = delimitedTextHeaders; + DelimitedTextDelimiter = delimitedTextDelimiter; + FirstLineContainsHeaders = firstLineContainsHeaders; + MarkdownParsingSubmode = markdownParsingSubmode; + MarkdownHeaderDepth = markdownHeaderDepth; + DocumentRoot = documentRoot; + DataToExtract = dataToExtract; + ImageAction = imageAction; + AllowSkillsetToReadFileData = allowSkillsetToReadFileData; + PdfTextRotationAlgorithm = pdfTextRotationAlgorithm; + ExecutionEnvironment = executionEnvironment; + QueryTimeout = queryTimeout; + AdditionalProperties = additionalProperties; + } + + /// Represents the parsing mode for indexing from an Azure blob data source. + public BlobIndexerParsingMode? ParsingMode { get; set; } + /// + /// Comma-delimited list of filename extensions to ignore when processing from + /// Azure blob storage. For example, you could exclude ".png, .mp4" to skip over + /// those files during indexing. + /// + public string ExcludedFileNameExtensions { get; set; } + /// + /// Comma-delimited list of filename extensions to select when processing from + /// Azure blob storage. For example, you could focus indexing on specific + /// application files ".docx, .pptx, .msg" to specifically include those file + /// types. + /// + public string IndexedFileNameExtensions { get; set; } + /// + /// For Azure blobs, set to false if you want to continue indexing when an + /// unsupported content type is encountered, and you don't know all the content + /// types (file extensions) in advance. + /// + public bool? FailOnUnsupportedContentType { get; set; } + /// + /// For Azure blobs, set to false if you want to continue indexing if a document + /// fails indexing. + /// + public bool? FailOnUnprocessableDocument { get; set; } + /// + /// For Azure blobs, set this property to true to still index storage metadata for + /// blob content that is too large to process. Oversized blobs are treated as + /// errors by default. For limits on blob size, see + /// https://learn.microsoft.com/azure/search/search-limits-quotas-capacity. + /// + public bool? IndexStorageMetadataOnlyForOversizedDocuments { get; set; } + /// + /// For CSV blobs, specifies a comma-delimited list of column headers, useful for + /// mapping source fields to destination fields in an index. 
+ /// + public string DelimitedTextHeaders { get; set; } + /// + /// For CSV blobs, specifies the end-of-line single-character delimiter for CSV + /// files where each line starts a new document (for example, "|"). + /// + public string DelimitedTextDelimiter { get; set; } + /// + /// For CSV blobs, indicates that the first (non-blank) line of each blob contains + /// headers. + /// + public bool? FirstLineContainsHeaders { get; set; } + /// + /// Specifies the submode that will determine whether a markdown file will be + /// parsed into exactly one search document or multiple search documents. Default + /// is `oneToMany`. + /// + public MarkdownParsingSubmode? MarkdownParsingSubmode { get; set; } + /// + /// Specifies the max header depth that will be considered while grouping markdown + /// content. Default is `h6`. + /// + public MarkdownHeaderDepth? MarkdownHeaderDepth { get; set; } + /// + /// For JSON arrays, given a structured or semi-structured document, you can + /// specify a path to the array using this property. + /// + public string DocumentRoot { get; set; } + /// + /// Specifies the data to extract from Azure blob storage and tells the indexer + /// which data to extract from image content when "imageAction" is set to a value + /// other than "none". This applies to embedded image content in a .PDF or other + /// application, or image files such as .jpg and .png, in Azure blobs. + /// + public BlobIndexerDataToExtract? DataToExtract { get; set; } + /// + /// Determines how to process embedded images and image files in Azure blob + /// storage. Setting the "imageAction" configuration to any value other than + /// "none" requires that a skillset also be attached to that indexer. + /// + public BlobIndexerImageAction? ImageAction { get; set; } + /// + /// If true, will create a path //document//file_data that is an object + /// representing the original file data downloaded from your blob data source. + /// This allows you to pass the original file data to a custom skill for processing + /// within the enrichment pipeline, or to the Document Extraction skill. + /// + public bool? AllowSkillsetToReadFileData { get; set; } + /// Determines algorithm for text extraction from PDF files in Azure blob storage. + public Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm? PdfTextRotationAlgorithm { get; set; } + /// Specifies the environment in which the indexer should execute. + public IndexerExecutionEnvironment? ExecutionEnvironment { get; set; } + /// + /// Increases the timeout beyond the 5-minute default for Azure SQL database data + /// sources, specified in the format "hh:mm:ss". + /// + public string QueryTimeout { get; set; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// + /// + public IDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingResult.Serialization.cs new file mode 100644 index 000000000000..7c50ab48c413 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingResult.Serialization.cs @@ -0,0 +1,169 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + public partial class IndexingResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("key"u8); + writer.WriteStringValue(Key); + if (Optional.IsDefined(ErrorMessage)) + { + writer.WritePropertyName("errorMessage"u8); + writer.WriteStringValue(ErrorMessage); + } + writer.WritePropertyName("status"u8); + writer.WriteBooleanValue(Succeeded); + writer.WritePropertyName("statusCode"u8); + writer.WriteNumberValue(Status); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + IndexingResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexingResult(document.RootElement, options); + } + + internal static IndexingResult DeserializeIndexingResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string key = default; + string errorMessage = default; + bool status = default; + int statusCode = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("errorMessage"u8)) + { + errorMessage = property.Value.GetString(); + continue; + } + if (property.NameEquals("status"u8)) + { + status = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("statusCode"u8)) + { + statusCode = property.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IndexingResult(key, errorMessage, status, statusCode, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexingResult)} does not support writing '{options.Format}' format."); + } + } + + IndexingResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexingResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexingResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingResult(document.RootElement); + } + + /// Convert into a . 
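These generated models all follow the same IJsonModel/IPersistableModel pattern and accept only the JSON ("J") format. A hedged sketch of round-tripping one of the publicly constructible models (IndexingSchedule, declared later in this diff) through System.ClientModel's ModelReaderWriter; the call pattern is an assumption based on that interface, not something this diff demonstrates.

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

// Serialize a model that implements IPersistableModel<T> to its JSON wire form
// and read it back; both calls route through the generated Write/Create members.
var schedule = new IndexingSchedule(TimeSpan.FromHours(1)) { StartTime = DateTimeOffset.UtcNow };
BinaryData json = ModelReaderWriter.Write(schedule);
IndexingSchedule roundTripped = ModelReaderWriter.Read<IndexingSchedule>(json);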
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingResult.cs new file mode 100644 index 000000000000..77c60b1cf63f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingResult.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Models +{ + /// Status of an indexing operation for a single document. + public partial class IndexingResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The key of a document that was in the indexing request. + /// + /// A value indicating whether the indexing operation succeeded for the document + /// identified by the key. + /// + /// + /// The status code of the indexing operation. Possible values include: 200 for a + /// successful update or delete, 201 for successful document creation, 400 for a + /// malformed input document, 404 for document not found, 409 for a version + /// conflict, 422 when the index is temporarily unavailable, or 503 for when the + /// service is too busy. + /// + /// is null. + internal IndexingResult(string key, bool succeeded, int status) + { + Argument.AssertNotNull(key, nameof(key)); + + Key = key; + Succeeded = succeeded; + Status = status; + } + + /// Initializes a new instance of . + /// The key of a document that was in the indexing request. + /// + /// The error message explaining why the indexing operation failed for the document + /// identified by the key; null if indexing succeeded. + /// + /// + /// A value indicating whether the indexing operation succeeded for the document + /// identified by the key. + /// + /// + /// The status code of the indexing operation. Possible values include: 200 for a + /// successful update or delete, 201 for successful document creation, 400 for a + /// malformed input document, 404 for document not found, 409 for a version + /// conflict, 422 when the index is temporarily unavailable, or 503 for when the + /// service is too busy. + /// + /// Keeps track of any properties unknown to the library. + internal IndexingResult(string key, string errorMessage, bool succeeded, int status, IDictionary serializedAdditionalRawData) + { + Key = key; + ErrorMessage = errorMessage; + Succeeded = succeeded; + Status = status; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
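A brief sketch of inspecting per-document results with the members declared in this file (Key, ErrorMessage, Succeeded, and the Status code assigned in the constructors); where the results collection comes from, such as an index-documents response, is assumed and not part of this diff.

using System;
using System.Collections.Generic;
using Azure.Search.Documents.Models;

// Report any per-document failures from an indexing batch.
static void ReportFailures(IEnumerable<IndexingResult> results)
{
    foreach (IndexingResult result in results)
    {
        if (!result.Succeeded)
        {
            Console.WriteLine($"Document '{result.Key}' failed with status {result.Status}: {result.ErrorMessage}");
        }
    }
}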
+ internal IndexingResult() + { + } + + /// The key of a document that was in the indexing request. + public string Key { get; } + /// + /// The error message explaining why the indexing operation failed for the document + /// identified by the key; null if indexing succeeded. + /// + public string ErrorMessage { get; } + /// + /// A value indicating whether the indexing operation succeeded for the document + /// identified by the key. + /// + public bool Succeeded { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingSchedule.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingSchedule.Serialization.cs new file mode 100644 index 000000000000..0c8cf677c489 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingSchedule.Serialization.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class IndexingSchedule : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingSchedule)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("interval"u8); + writer.WriteStringValue(Interval, "P"); + if (Optional.IsDefined(StartTime)) + { + writer.WritePropertyName("startTime"u8); + writer.WriteStringValue(StartTime.Value, "O"); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + IndexingSchedule IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IndexingSchedule)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIndexingSchedule(document.RootElement, options); + } + + internal static IndexingSchedule DeserializeIndexingSchedule(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TimeSpan interval = default; + DateTimeOffset? 
startTime = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("interval"u8)) + { + interval = property.Value.GetTimeSpan("P"); + continue; + } + if (property.NameEquals("startTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + startTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IndexingSchedule(interval, startTime, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IndexingSchedule)} does not support writing '{options.Format}' format."); + } + } + + IndexingSchedule IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingSchedule(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IndexingSchedule)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IndexingSchedule FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeIndexingSchedule(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/IndexingSchedule.cs b/sdk/search/Azure.Search.Documents/src/Generated/IndexingSchedule.cs new file mode 100644 index 000000000000..761dfb40af7d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/IndexingSchedule.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents a schedule for indexer execution. + public partial class IndexingSchedule + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The interval of time between indexer executions. + public IndexingSchedule(TimeSpan interval) + { + Interval = interval; + } + + /// Initializes a new instance of . + /// The interval of time between indexer executions. + /// The time when an indexer should start running. + /// Keeps track of any properties unknown to the library. + internal IndexingSchedule(TimeSpan interval, DateTimeOffset? startTime, IDictionary serializedAdditionalRawData) + { + Interval = interval; + StartTime = startTime; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IndexingSchedule() + { + } + + /// The interval of time between indexer executions. + public TimeSpan Interval { get; set; } + /// The time when an indexer should start running. + public DateTimeOffset? StartTime { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/InputFieldMappingEntry.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/InputFieldMappingEntry.Serialization.cs new file mode 100644 index 000000000000..1daf8128b9b0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/InputFieldMappingEntry.Serialization.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class InputFieldMappingEntry : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(InputFieldMappingEntry)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(SourceContext)) + { + writer.WritePropertyName("sourceContext"u8); + writer.WriteStringValue(SourceContext); + } + if (Optional.IsCollectionDefined(Inputs)) + { + writer.WritePropertyName("inputs"u8); + writer.WriteStartArray(); + foreach (var item in Inputs) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + InputFieldMappingEntry IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(InputFieldMappingEntry)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeInputFieldMappingEntry(document.RootElement, options); + } + + internal static InputFieldMappingEntry DeserializeInputFieldMappingEntry(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string source = default; + string sourceContext = default; + IList inputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("source"u8)) + { + source = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceContext"u8)) + { + sourceContext = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new InputFieldMappingEntry(name, source, sourceContext, inputs ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(InputFieldMappingEntry)} does not support writing '{options.Format}' format."); + } + } + + InputFieldMappingEntry IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeInputFieldMappingEntry(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(InputFieldMappingEntry)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static InputFieldMappingEntry FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeInputFieldMappingEntry(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/InputFieldMappingEntry.cs b/sdk/search/Azure.Search.Documents/src/Generated/InputFieldMappingEntry.cs new file mode 100644 index 000000000000..dc6c111d24ed --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/InputFieldMappingEntry.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Input field mapping for a skill. + public partial class InputFieldMappingEntry + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the input. + /// is null. + public InputFieldMappingEntry(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + Inputs = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The name of the input. + /// The source of the input. + /// The source context used for selecting recursive inputs. + /// The recursive inputs used when creating a complex type. + /// Keeps track of any properties unknown to the library. 
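A minimal sketch of composing skill inputs with the public constructor and properties declared in this file; the "/document/..." annotation paths and the skill these entries would be attached to are illustrative assumptions, not taken from this diff.

using System.Collections.Generic;
using Azure.Search.Documents;

// Map enriched-document fields into a skill's inputs. Name is required by the
// constructor; Source is optional and points at an enrichment-tree path.
var inputs = new List<InputFieldMappingEntry>
{
    new InputFieldMappingEntry("text") { Source = "/document/content" },
    new InputFieldMappingEntry("languageCode") { Source = "/document/languageCode" }
};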
+ internal InputFieldMappingEntry(string name, string source, string sourceContext, IList inputs, IDictionary serializedAdditionalRawData) + { + Name = name; + Source = source; + SourceContext = sourceContext; + Inputs = inputs; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal InputFieldMappingEntry() + { + } + + /// The name of the input. + public string Name { get; set; } + /// The source of the input. + public string Source { get; set; } + /// The source context used for selecting recursive inputs. + public string SourceContext { get; set; } + /// The recursive inputs used when creating a complex type. + public IList Inputs { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Internal/ModelSerializationExtensions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Internal/ModelSerializationExtensions.cs index 3b7865e2525f..a147eb93ea06 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Internal/ModelSerializationExtensions.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/Internal/ModelSerializationExtensions.cs @@ -168,13 +168,16 @@ public static void WriteNumberValue(this Utf8JsonWriter writer, DateTimeOffset v writer.WriteNumberValue(value.ToUnixTimeSeconds()); } - public static void WriteObjectValue(this Utf8JsonWriter writer, T value) + public static void WriteObjectValue(this Utf8JsonWriter writer, T value, ModelReaderWriterOptions options = null) { switch (value) { case null: writer.WriteNullValue(); break; + case IJsonModel jsonModel: + jsonModel.Write(writer, options ?? WireOptions); + break; case IUtf8JsonSerializable serializable: serializable.Write(writer); break; @@ -229,7 +232,7 @@ public static void WriteObjectValue(this Utf8JsonWriter writer, T value) foreach (var pair in enumerable) { writer.WritePropertyName(pair.Key); - writer.WriteObjectValue(pair.Value); + writer.WriteObjectValue(pair.Value, options); } writer.WriteEndObject(); break; @@ -237,7 +240,7 @@ public static void WriteObjectValue(this Utf8JsonWriter writer, T value) writer.WriteStartArray(); foreach (var item in objectEnumerable) { - writer.WriteObjectValue(item); + writer.WriteObjectValue(item, options); } writer.WriteEndArray(); break; @@ -249,9 +252,9 @@ public static void WriteObjectValue(this Utf8JsonWriter writer, T value) } } - public static void WriteObjectValue(this Utf8JsonWriter writer, object value) + public static void WriteObjectValue(this Utf8JsonWriter writer, object value, ModelReaderWriterOptions options = null) { - writer.WriteObjectValue(value); + writer.WriteObjectValue(value, options); } internal static class TypeFormatters diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Internal/MultipartFormDataRequestContent.cs b/sdk/search/Azure.Search.Documents/src/Generated/Internal/MultipartFormDataRequestContent.cs deleted file mode 100644 index 376a1979d11a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Internal/MultipartFormDataRequestContent.cs +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Globalization; -using System.IO; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; - -namespace Azure.Search.Documents -{ - internal class MultipartFormDataRequestContent : RequestContent - { - private readonly System.Net.Http.MultipartFormDataContent _multipartContent; - private static readonly Random _random = new Random(); - private static readonly char[] _boundaryValues = "0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz".ToCharArray(); - - public MultipartFormDataRequestContent() - { - _multipartContent = new System.Net.Http.MultipartFormDataContent(CreateBoundary()); - } - - public string ContentType - { - get - { - return _multipartContent.Headers.ContentType.ToString(); - } - } - - internal HttpContent HttpContent => _multipartContent; - - private static string CreateBoundary() - { - Span chars = new char[70]; - byte[] random = new byte[70]; - _random.NextBytes(random); - int mask = 255 >> 2; - for (int i = 0; i < 70; i++) - { - chars[i] = _boundaryValues[random[i] & mask]; - } - return chars.ToString(); - } - - public void Add(string content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - Add(new StringContent(content), name, filename, contentType); - } - - public void Add(int content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - string value = content.ToString("G", CultureInfo.InvariantCulture); - Add(new StringContent(value), name, filename, contentType); - } - - public void Add(long content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - string value = content.ToString("G", CultureInfo.InvariantCulture); - Add(new StringContent(value), name, filename, contentType); - } - - public void Add(float content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - string value = content.ToString("G", CultureInfo.InvariantCulture); - Add(new StringContent(value), name, filename, contentType); - } - - public void Add(double content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - string value = content.ToString("G", CultureInfo.InvariantCulture); - Add(new StringContent(value), name, filename, contentType); - } - - public void Add(decimal content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - string value = content.ToString("G", CultureInfo.InvariantCulture); - Add(new StringContent(value), name, filename, contentType); - } - - public void Add(bool content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - string value = content ? 
"true" : "false"; - Add(new StringContent(value), name, filename, contentType); - } - - public void Add(Stream content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - Add(new StreamContent(content), name, filename, contentType); - } - - public void Add(byte[] content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - Add(new ByteArrayContent(content), name, filename, contentType); - } - - public void Add(BinaryData content, string name, string filename = null, string contentType = null) - { - Argument.AssertNotNull(content, nameof(content)); - Argument.AssertNotNullOrEmpty(name, nameof(name)); - - Add(new ByteArrayContent(content.ToArray()), name, filename, contentType); - } - - private void Add(HttpContent content, string name, string filename, string contentType) - { - if (filename != null) - { - Argument.AssertNotNullOrEmpty(filename, nameof(filename)); - AddFilenameHeader(content, name, filename); - } - if (contentType != null) - { - Argument.AssertNotNullOrEmpty(contentType, nameof(contentType)); - AddContentTypeHeader(content, contentType); - } - _multipartContent.Add(content, name); - } - - public static void AddFilenameHeader(HttpContent content, string name, string filename) - { - ContentDispositionHeaderValue header = new ContentDispositionHeaderValue("form-data") { Name = name, FileName = filename }; - content.Headers.ContentDisposition = header; - } - - public static void AddContentTypeHeader(HttpContent content, string contentType) - { - MediaTypeHeaderValue header = new MediaTypeHeaderValue(contentType); - content.Headers.ContentType = header; - } - - public override bool TryComputeLength(out long length) - { - if (_multipartContent.Headers.ContentLength is long contentLength) - { - length = contentLength; - return true; - } - length = 0; - return false; - } - - public override void WriteTo(Stream stream, CancellationToken cancellationToken = default) - { -#if NET6_0_OR_GREATER - _multipartContent.CopyTo(stream, default, cancellationToken); -#else -#pragma warning disable AZC0107 - _multipartContent.CopyToAsync(stream).EnsureCompleted(); -#pragma warning restore AZC0107 -#endif - } - - public override async Task WriteToAsync(Stream stream, CancellationToken cancellationToken = default) - { -#if NET6_0_OR_GREATER - await _multipartContent.CopyToAsync(stream, cancellationToken).ConfigureAwait(false); -#else - await _multipartContent.CopyToAsync(stream).ConfigureAwait(false); -#endif - } - - public override void Dispose() - { - _multipartContent.Dispose(); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Internal/RequestContentHelper.cs b/sdk/search/Azure.Search.Documents/src/Generated/Internal/RequestContentHelper.cs deleted file mode 100644 index dac5a6a4e346..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Internal/RequestContentHelper.cs +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents -{ - internal static class RequestContentHelper - { - public static RequestContent FromEnumerable(IEnumerable enumerable) - where T : notnull - { - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteStartArray(); - foreach (var item in enumerable) - { - content.JsonWriter.WriteObjectValue(item); - } - content.JsonWriter.WriteEndArray(); - - return content; - } - - public static RequestContent FromEnumerable(IEnumerable enumerable) - { - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteStartArray(); - foreach (var item in enumerable) - { - if (item == null) - { - content.JsonWriter.WriteNullValue(); - } - else - { -#if NET6_0_OR_GREATER - content.JsonWriter.WriteRawValue(item); -#else - using (JsonDocument document = JsonDocument.Parse(item, ModelSerializationExtensions.JsonDocumentOptions)) - { - JsonSerializer.Serialize(content.JsonWriter, document.RootElement); - } -#endif - } - } - content.JsonWriter.WriteEndArray(); - - return content; - } - - public static RequestContent FromEnumerable(ReadOnlySpan span) - where T : notnull - { - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteStartArray(); - for (int i = 0; i < span.Length; i++) - { - content.JsonWriter.WriteObjectValue(span[i]); - } - content.JsonWriter.WriteEndArray(); - - return content; - } - - public static RequestContent FromDictionary(IDictionary dictionary) - where TValue : notnull - { - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteStartObject(); - foreach (var item in dictionary) - { - content.JsonWriter.WritePropertyName(item.Key); - content.JsonWriter.WriteObjectValue(item.Value); - } - content.JsonWriter.WriteEndObject(); - - return content; - } - - public static RequestContent FromDictionary(IDictionary dictionary) - { - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteStartObject(); - foreach (var item in dictionary) - { - content.JsonWriter.WritePropertyName(item.Key); - if (item.Value == null) - { - content.JsonWriter.WriteNullValue(); - } - else - { -#if NET6_0_OR_GREATER - content.JsonWriter.WriteRawValue(item.Value); -#else - using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) - { - JsonSerializer.Serialize(content.JsonWriter, document.RootElement); - } -#endif - } - } - content.JsonWriter.WriteEndObject(); - - return content; - } - - public static RequestContent FromObject(object value) - { - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(value); - return content; - } - - public static RequestContent FromObject(BinaryData value) - { - Utf8JsonRequestContent content = new Utf8JsonRequestContent(); -#if NET6_0_OR_GREATER - content.JsonWriter.WriteRawValue(value); -#else - using (JsonDocument document = JsonDocument.Parse(value, ModelSerializationExtensions.JsonDocumentOptions)) - { - JsonSerializer.Serialize(content.JsonWriter, document.RootElement); - } -#endif - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeepTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeepTokenFilter.Serialization.cs new file mode 100644 index 000000000000..0e3c3dbda97f --- /dev/null +++ 
b/sdk/search/Azure.Search.Documents/src/Generated/KeepTokenFilter.Serialization.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class KeepTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KeepTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("keepWords"u8); + writer.WriteStartArray(); + foreach (var item in KeepWords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(LowerCaseKeepWords)) + { + writer.WritePropertyName("keepWordsCase"u8); + writer.WriteBooleanValue(LowerCaseKeepWords.Value); + } + } + + KeepTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KeepTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeKeepTokenFilter(document.RootElement, options); + } + + internal static KeepTokenFilter DeserializeKeepTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList keepWords = default; + bool? 
keepWordsCase = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("keepWords"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + keepWords = array; + continue; + } + if (property.NameEquals("keepWordsCase"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + keepWordsCase = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new KeepTokenFilter(odataType, name, serializedAdditionalRawData, keepWords, keepWordsCase); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(KeepTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + KeepTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeKeepTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(KeepTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new KeepTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeKeepTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeepTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeepTokenFilter.cs new file mode 100644 index 000000000000..f2755a4cc0f0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeepTokenFilter.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// A token filter that only keeps tokens with text contained in a specified list + /// of words. This token filter is implemented using Apache Lucene. 
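A brief usage sketch for the KeepTokenFilter model added in this change, based on the public constructor and properties in this file (the filter name and word list are illustrative, not taken from the generated code):

// Keep only tokens whose text appears in the supplied word list.
var keepFilter = new KeepTokenFilter("myKeepFilter", new[] { "alpha", "beta", "gamma" })
{
    // Optional; the generated docs state the default is false.
    LowerCaseKeepWords = true
};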
+ /// + public partial class KeepTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// The list of words to keep. + /// or is null. + public KeepTokenFilter(string name, IEnumerable keepWords) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(keepWords, nameof(keepWords)); + + OdataType = "#Microsoft.Azure.Search.KeepTokenFilter"; + KeepWords = keepWords.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The list of words to keep. + /// A value indicating whether to lower case all words first. Default is false. + internal KeepTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList keepWords, bool? lowerCaseKeepWords) : base(odataType, name, serializedAdditionalRawData) + { + KeepWords = keepWords; + LowerCaseKeepWords = lowerCaseKeepWords; + } + + /// Initializes a new instance of for deserialization. + internal KeepTokenFilter() + { + } + + /// The list of words to keep. + public IList KeepWords { get; } + /// A value indicating whether to lower case all words first. Default is false. + public bool? LowerCaseKeepWords { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkill.Serialization.cs new file mode 100644 index 000000000000..7951f0a7fcd8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkill.Serialization.cs @@ -0,0 +1,217 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class KeyPhraseExtractionSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KeyPhraseExtractionSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); + } + if (Optional.IsDefined(MaxKeyPhraseCount)) + { + writer.WritePropertyName("maxKeyPhraseCount"u8); + writer.WriteNumberValue(MaxKeyPhraseCount.Value); + } + if (Optional.IsDefined(ModelVersion)) + { + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + } + } + + KeyPhraseExtractionSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KeyPhraseExtractionSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeKeyPhraseExtractionSkill(document.RootElement, options); + } + + internal static KeyPhraseExtractionSkill DeserializeKeyPhraseExtractionSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + KeyPhraseExtractionSkillLanguage? defaultLanguageCode = default; + int? maxKeyPhraseCount = default; + string modelVersion = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultLanguageCode = new KeyPhraseExtractionSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("maxKeyPhraseCount"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxKeyPhraseCount = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, 
BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new KeyPhraseExtractionSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + maxKeyPhraseCount, + modelVersion); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(KeyPhraseExtractionSkill)} does not support writing '{options.Format}' format."); + } + } + + KeyPhraseExtractionSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeKeyPhraseExtractionSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(KeyPhraseExtractionSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new KeyPhraseExtractionSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeKeyPhraseExtractionSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkill.cs new file mode 100644 index 000000000000..e53e07836035 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkill.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A skill that uses text analytics for key phrase extraction. + public partial class KeyPhraseExtractionSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public KeyPhraseExtractionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.KeyPhraseExtractionSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. 
A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. Default is `en`. + /// + /// A number indicating how many key phrases to return. If absent, all identified + /// key phrases will be returned. + /// + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + internal KeyPhraseExtractionSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, KeyPhraseExtractionSkillLanguage? defaultLanguageCode, int? maxKeyPhraseCount, string modelVersion) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + MaxKeyPhraseCount = maxKeyPhraseCount; + ModelVersion = modelVersion; + } + + /// Initializes a new instance of for deserialization. + internal KeyPhraseExtractionSkill() + { + } + + /// A value indicating which language code to use. Default is `en`. + public KeyPhraseExtractionSkillLanguage? DefaultLanguageCode { get; set; } + /// + /// A number indicating how many key phrases to return. If absent, all identified + /// key phrases will be returned. + /// + public int? MaxKeyPhraseCount { get; set; } + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + public string ModelVersion { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkillLanguage.cs similarity index 99% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkillLanguage.cs index 878941202103..0561a6d1d87e 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeyPhraseExtractionSkillLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The language codes supported for input text by KeyPhraseExtractionSkill. 
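To tie the skill and its language enum together, a hedged usage sketch based on the public constructor and settable properties shown above; the InputFieldMappingEntry/OutputFieldMappingEntry shapes are assumed to follow their usual constructor-plus-Source/TargetName pattern, and the field names are illustrative:

// Extract key phrases from /document/content and project them to "keyPhrases".
var skill = new KeyPhraseExtractionSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("keyPhrases") { TargetName = "keyPhrases" } })
{
    // The struct accepts a supported language code string, per its string constructor.
    DefaultLanguageCode = new KeyPhraseExtractionSkillLanguage("en"),
    MaxKeyPhraseCount = 10
};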
public readonly partial struct KeyPhraseExtractionSkillLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeywordMarkerTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeywordMarkerTokenFilter.Serialization.cs new file mode 100644 index 000000000000..37c1d860de31 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeywordMarkerTokenFilter.Serialization.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class KeywordMarkerTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KeywordMarkerTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("keywords"u8); + writer.WriteStartArray(); + foreach (var item in Keywords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(IgnoreCase)) + { + writer.WritePropertyName("ignoreCase"u8); + writer.WriteBooleanValue(IgnoreCase.Value); + } + } + + KeywordMarkerTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KeywordMarkerTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeKeywordMarkerTokenFilter(document.RootElement, options); + } + + internal static KeywordMarkerTokenFilter DeserializeKeywordMarkerTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList keywords = default; + bool? 
ignoreCase = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("keywords"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + keywords = array; + continue; + } + if (property.NameEquals("ignoreCase"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + ignoreCase = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new KeywordMarkerTokenFilter(odataType, name, serializedAdditionalRawData, keywords, ignoreCase); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(KeywordMarkerTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + KeywordMarkerTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeKeywordMarkerTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(KeywordMarkerTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new KeywordMarkerTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeKeywordMarkerTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeywordMarkerTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeywordMarkerTokenFilter.cs new file mode 100644 index 000000000000..370b0396580d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeywordMarkerTokenFilter.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// Marks terms as keywords. This token filter is implemented using Apache Lucene. 
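A short usage sketch for the KeywordMarkerTokenFilter defined in this file (the filter name and keyword list are illustrative assumptions):

// Mark the listed terms as keywords.
var marker = new KeywordMarkerTokenFilter("myKeywordMarker", new[] { "Azure", "Lucene" })
{
    // Optional; the generated docs state the default is false.
    IgnoreCase = true
};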
+ public partial class KeywordMarkerTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// A list of words to mark as keywords. + /// or is null. + public KeywordMarkerTokenFilter(string name, IEnumerable keywords) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(keywords, nameof(keywords)); + + OdataType = "#Microsoft.Azure.Search.KeywordMarkerTokenFilter"; + Keywords = keywords.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// A list of words to mark as keywords. + /// + /// A value indicating whether to ignore case. If true, all words are converted to + /// lower case first. Default is false. + /// + internal KeywordMarkerTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList keywords, bool? ignoreCase) : base(odataType, name, serializedAdditionalRawData) + { + Keywords = keywords; + IgnoreCase = ignoreCase; + } + + /// Initializes a new instance of for deserialization. + internal KeywordMarkerTokenFilter() + { + } + + /// A list of words to mark as keywords. + public IList Keywords { get; } + /// + /// A value indicating whether to ignore case. If true, all words are converted to + /// lower case first. Default is false. + /// + public bool? IgnoreCase { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeywordTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeywordTokenizer.Serialization.cs new file mode 100644 index 000000000000..988a2b57b465 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeywordTokenizer.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KeywordTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KeywordTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + } + + Search.Documents.Indexes.Models.KeywordTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KeywordTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KeywordTokenizer.DeserializeKeywordTokenizer(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KeywordTokenizer DeserializeKeywordTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxTokenLength = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KeywordTokenizer(odataType, name, serializedAdditionalRawData, maxTokenLength); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KeywordTokenizer)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KeywordTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KeywordTokenizer.DeserializeKeywordTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KeywordTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.KeywordTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KeywordTokenizer.DeserializeKeywordTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KeywordTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/KeywordTokenizer.cs new file mode 100644 index 000000000000..572721dbcd72 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KeywordTokenizer.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Emits the entire input as a single token. This tokenizer is implemented using + /// Apache Lucene. + /// + public partial class KeywordTokenizer : Search.Documents.LexicalTokenizer + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The maximum token length. Default is 256. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + internal KeywordTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenLength) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenLength = maxTokenLength; + } + + /// Initializes a new instance of for deserialization. + internal KeywordTokenizer() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStore.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStore.Serialization.cs new file mode 100644 index 000000000000..623cc632e192 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStore.Serialization.cs @@ -0,0 +1,190 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
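Each generated partial in this change implements IJsonModel<T> and IPersistableModel<T> with the same wire ("W") versus JSON ("J") format check. As a hedged orientation sketch, these models can be round-tripped through the System.ClientModel ModelReaderWriter entry points; the KeepTokenFilter instance is just an illustrative model from earlier in this diff:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

var filter = new KeepTokenFilter("myKeepFilter", new[] { "alpha" });           // illustrative instance
BinaryData json = ModelReaderWriter.Write(filter);                             // exercises the "J" write path above
KeepTokenFilter roundTripped = ModelReaderWriter.Read<KeepTokenFilter>(json);  // exercises the "J" read path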
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KnowledgeStore : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStore)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("storageConnectionString"u8); + writer.WriteStringValue(StorageConnectionString); + writer.WritePropertyName("projections"u8); + writer.WriteStartArray(); + foreach (var item in Projections) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(Identity)) + { + writer.WritePropertyName("identity"u8); + writer.WriteObjectValue(Identity, options); + } + if (Optional.IsDefined(Parameters)) + { + writer.WritePropertyName("parameters"u8); + writer.WriteObjectValue(Parameters, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.KnowledgeStore IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStore)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KnowledgeStore.DeserializeKnowledgeStore(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KnowledgeStore DeserializeKnowledgeStore(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string storageConnectionString = default; + IList projections = default; + Search.Documents.SearchIndexerDataIdentity identity = default; + SearchIndexerKnowledgeStoreParameters parameters = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("storageConnectionString"u8)) + { + storageConnectionString = property.Value.GetString(); + continue; + } + if (property.NameEquals("projections"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.Indexes.Models.KnowledgeStoreProjection.DeserializeKnowledgeStoreProjection(item, options)); + } + projections = array; + continue; + } + if (property.NameEquals("identity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + identity = Search.Documents.SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (property.NameEquals("parameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + parameters = SearchIndexerKnowledgeStoreParameters.DeserializeSearchIndexerKnowledgeStoreParameters(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KnowledgeStore(storageConnectionString, projections, identity, parameters, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStore)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KnowledgeStore IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStore.DeserializeKnowledgeStore(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStore)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static Search.Documents.Indexes.Models.KnowledgeStore FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStore.DeserializeKnowledgeStore(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStore.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStore.cs new file mode 100644 index 000000000000..0a55d49bd30d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStore.cs @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Definition of additional projections to azure blob, table, or files, of + /// enriched data. + /// + public partial class KnowledgeStore + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The connection string to the storage account projections will be stored in. + /// A list of additional projections to perform during indexing. + /// or is null. + public KnowledgeStore(string storageConnectionString, IEnumerable projections) + { + Argument.AssertNotNull(storageConnectionString, nameof(storageConnectionString)); + Argument.AssertNotNull(projections, nameof(projections)); + + StorageConnectionString = storageConnectionString; + Projections = projections.ToList(); + } + + /// Initializes a new instance of . + /// The connection string to the storage account projections will be stored in. + /// A list of additional projections to perform during indexing. + /// + /// The user-assigned managed identity used for connections to Azure Storage when + /// writing knowledge store projections. 
If the connection string indicates an + /// identity (ResourceId) and it's not specified, the system-assigned managed + /// identity is used. On updates to the indexer, if the identity is unspecified, + /// the value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// A dictionary of knowledge store-specific configuration properties. Each name is + /// the name of a specific property. Each value must be of a primitive type. + /// + /// Keeps track of any properties unknown to the library. + internal KnowledgeStore(string storageConnectionString, IList projections, Search.Documents.SearchIndexerDataIdentity identity, SearchIndexerKnowledgeStoreParameters parameters, IDictionary serializedAdditionalRawData) + { + StorageConnectionString = storageConnectionString; + Projections = projections; + Identity = identity; + Parameters = parameters; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal KnowledgeStore() + { + } + + /// The connection string to the storage account projections will be stored in. + public string StorageConnectionString { get; set; } + /// A list of additional projections to perform during indexing. + public IList Projections { get; } + /// + /// The user-assigned managed identity used for connections to Azure Storage when + /// writing knowledge store projections. If the connection string indicates an + /// identity (ResourceId) and it's not specified, the system-assigned managed + /// identity is used. On updates to the indexer, if the identity is unspecified, + /// the value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public Search.Documents.SearchIndexerDataIdentity Identity { get; set; } + /// + /// A dictionary of knowledge store-specific configuration properties. Each name is + /// the name of a specific property. Each value must be of a primitive type. + /// + public SearchIndexerKnowledgeStoreParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreFileProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreFileProjectionSelector.Serialization.cs new file mode 100644 index 000000000000..553c27436b85 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreFileProjectionSelector.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
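A hedged sketch of constructing the KnowledgeStore model defined above together with the object and file projection selectors added later in this diff. The container names and connection-string placeholder are illustrative, and this assumes KnowledgeStoreProjection exposes a parameterless constructor with mutable Tables/Objects/Files collections, as its serializer suggests:

// Project enriched content into blob and file containers (names are illustrative).
var projection = new KnowledgeStoreProjection();   // assumed parameterless constructor
projection.Objects.Add(new KnowledgeStoreObjectProjectionSelector("my-object-container"));
projection.Files.Add(new KnowledgeStoreFileProjectionSelector("my-file-container"));

var store = new KnowledgeStore("<storage-connection-string>", new[] { projection });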
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KnowledgeStoreFileProjectionSelector : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector.DeserializeKnowledgeStoreFileProjectionSelector(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector DeserializeKnowledgeStoreFileProjectionSelector(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string storageContainer = default; + string referenceKeyName = default; + string generatedKeyName = default; + string source = default; + string sourceContext = default; + IList inputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("storageContainer"u8)) + { + storageContainer = property.Value.GetString(); + continue; + } + if (property.NameEquals("referenceKeyName"u8)) + { + referenceKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("generatedKeyName"u8)) + { + generatedKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("source"u8)) + { + source = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceContext"u8)) + { + sourceContext = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, 
BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector( + referenceKeyName, + generatedKeyName, + source, + sourceContext, + inputs ?? new ChangeTrackingList(), + serializedAdditionalRawData, + storageContainer); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector.DeserializeKnowledgeStoreFileProjectionSelector(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector.DeserializeKnowledgeStoreFileProjectionSelector(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreFileProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreFileProjectionSelector.cs new file mode 100644 index 000000000000..fa874294d777 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreFileProjectionSelector.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Projection definition for what data to store in Azure Files. + public partial class KnowledgeStoreFileProjectionSelector : Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector + { + /// Initializes a new instance of . + /// Blob container to store projections in. + /// is null. + public KnowledgeStoreFileProjectionSelector(string storageContainer) : base(storageContainer) + { + Argument.AssertNotNull(storageContainer, nameof(storageContainer)); + } + + /// Initializes a new instance of . 
+ /// Name of reference key to different projection. + /// Name of generated key to store projection under. + /// Source data to project. + /// Source context for complex projections. + /// Nested inputs for complex projections. + /// Keeps track of any properties unknown to the library. + /// Blob container to store projections in. + internal KnowledgeStoreFileProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, IDictionary serializedAdditionalRawData, string storageContainer) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs, serializedAdditionalRawData, storageContainer) + { + } + + /// Initializes a new instance of for deserialization. + internal KnowledgeStoreFileProjectionSelector() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreObjectProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreObjectProjectionSelector.Serialization.cs new file mode 100644 index 000000000000..0877fab96e93 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreObjectProjectionSelector.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KnowledgeStoreObjectProjectionSelector : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector.DeserializeKnowledgeStoreObjectProjectionSelector(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector DeserializeKnowledgeStoreObjectProjectionSelector(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string storageContainer = default; + string referenceKeyName = default; + string generatedKeyName = default; + string source = default; + string sourceContext = default; + IList inputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("storageContainer"u8)) + { + storageContainer = property.Value.GetString(); + continue; + } + if (property.NameEquals("referenceKeyName"u8)) + { + referenceKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("generatedKeyName"u8)) + { + generatedKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("source"u8)) + { + source = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceContext"u8)) + { + sourceContext = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector( + referenceKeyName, + generatedKeyName, + source, + sourceContext, + inputs ?? new ChangeTrackingList(), + serializedAdditionalRawData, + storageContainer); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector.DeserializeKnowledgeStoreObjectProjectionSelector(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector.DeserializeKnowledgeStoreObjectProjectionSelector(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreObjectProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreObjectProjectionSelector.cs new file mode 100644 index 000000000000..acce0559c699 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreObjectProjectionSelector.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Projection definition for what data to store in Azure Blob. + public partial class KnowledgeStoreObjectProjectionSelector : Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector + { + /// Initializes a new instance of . + /// Blob container to store projections in. + /// is null. + public KnowledgeStoreObjectProjectionSelector(string storageContainer) : base(storageContainer) + { + Argument.AssertNotNull(storageContainer, nameof(storageContainer)); + } + + /// Initializes a new instance of . + /// Name of reference key to different projection. + /// Name of generated key to store projection under. + /// Source data to project. + /// Source context for complex projections. + /// Nested inputs for complex projections. + /// Keeps track of any properties unknown to the library. + /// Blob container to store projections in. + internal KnowledgeStoreObjectProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, IDictionary serializedAdditionalRawData, string storageContainer) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs, serializedAdditionalRawData, storageContainer) + { + } + + /// Initializes a new instance of for deserialization. 
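// Example (sketch, not part of this diff): round-tripping one of the generated
// models through the IJsonModel<T>/IPersistableModel<T> plumbing shown above.
// "J" is the public JSON format used by ModelReaderWriter; "W" is the wire
// format reserved for the client pipeline (FromResponse/ToRequestContent).
// The container and key names are placeholders, and the hand-written public
// surface of the SDK may expose this type under a different name.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents.Indexes.Models;

var selector = new KnowledgeStoreObjectProjectionSelector("projected-objects")
{
    GeneratedKeyName = "DocumentId",
    SourceContext = "/document"
};

// Serialize with the "J" (JSON) format...
BinaryData json = ModelReaderWriter.Write(selector, ModelReaderWriterOptions.Json);

// ...and rehydrate it; properties unknown to the library, captured in the
// serializedAdditionalRawData dictionary, survive this round trip.
var copy = ModelReaderWriter.Read<KnowledgeStoreObjectProjectionSelector>(
    json, ModelReaderWriterOptions.Json);

Console.WriteLine(copy?.StorageContainer); // projected-objects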
+ internal KnowledgeStoreObjectProjectionSelector() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjection.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjection.Serialization.cs new file mode 100644 index 000000000000..0dc1a50abc79 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjection.Serialization.cs @@ -0,0 +1,209 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KnowledgeStoreProjection : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjection)} does not support writing '{format}' format."); + } + + if (Optional.IsCollectionDefined(Tables)) + { + writer.WritePropertyName("tables"u8); + writer.WriteStartArray(); + foreach (var item in Tables) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Objects)) + { + writer.WritePropertyName("objects"u8); + writer.WriteStartArray(); + foreach (var item in Objects) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Files)) + { + writer.WritePropertyName("files"u8); + writer.WriteStartArray(); + foreach (var item in Files) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreProjection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjection)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KnowledgeStoreProjection.DeserializeKnowledgeStoreProjection(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KnowledgeStoreProjection DeserializeKnowledgeStoreProjection(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList tables = default; + IList objects = default; + IList files = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tables"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector.DeserializeKnowledgeStoreTableProjectionSelector(item, options)); + } + tables = array; + continue; + } + if (property.NameEquals("objects"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.Indexes.Models.KnowledgeStoreObjectProjectionSelector.DeserializeKnowledgeStoreObjectProjectionSelector(item, options)); + } + objects = array; + continue; + } + if (property.NameEquals("files"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.Indexes.Models.KnowledgeStoreFileProjectionSelector.DeserializeKnowledgeStoreFileProjectionSelector(item, options)); + } + files = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KnowledgeStoreProjection(tables ?? new ChangeTrackingList(), objects ?? new ChangeTrackingList(), files ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjection)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreProjection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreProjection.DeserializeKnowledgeStoreProjection(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjection)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static Search.Documents.Indexes.Models.KnowledgeStoreProjection FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreProjection.DeserializeKnowledgeStoreProjection(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjection.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjection.cs new file mode 100644 index 000000000000..16bbc67251bc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjection.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Container object for various projection selectors. + public partial class KnowledgeStoreProjection + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public KnowledgeStoreProjection() + { + Tables = new ChangeTrackingList(); + Objects = new ChangeTrackingList(); + Files = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Projections to Azure Table storage. + /// Projections to Azure Blob storage. + /// Projections to Azure File storage. + /// Keeps track of any properties unknown to the library. + internal KnowledgeStoreProjection(IList tables, IList objects, IList files, IDictionary serializedAdditionalRawData) + { + Tables = tables; + Objects = objects; + Files = files; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Projections to Azure Table storage. + public IList Tables { get; } + /// Projections to Azure Blob storage. 
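// Example (sketch, not part of this diff): populating the KnowledgeStoreProjection
// container declared above with one selector of each kind. Types and constructors
// are used as they appear in this generated code; the hand-authored public surface
// may rename them, and the table, container, and source paths are placeholders.
using Azure.Search.Documents.Indexes.Models;

var projection = new KnowledgeStoreProjection();

projection.Tables.Add(new KnowledgeStoreTableProjectionSelector("docKeyPhrases")
{
    GeneratedKeyName = "DocumentId",
    Source = "/document/tableProjection"
});

projection.Objects.Add(new KnowledgeStoreObjectProjectionSelector("full-documents")
{
    GeneratedKeyName = "DocumentId",
    SourceContext = "/document"
});

projection.Files.Add(new KnowledgeStoreFileProjectionSelector("normalized-images")
{
    GeneratedKeyName = "ImageId",
    Source = "/document/normalized_images/*"
});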
+ public IList Objects { get; } + /// Projections to Azure File storage. + public IList Files { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjectionSelector.Serialization.cs new file mode 100644 index 000000000000..9edb362a7de0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjectionSelector.Serialization.cs @@ -0,0 +1,209 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KnowledgeStoreProjectionSelector : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(ReferenceKeyName)) + { + writer.WritePropertyName("referenceKeyName"u8); + writer.WriteStringValue(ReferenceKeyName); + } + if (Optional.IsDefined(GeneratedKeyName)) + { + writer.WritePropertyName("generatedKeyName"u8); + writer.WriteStringValue(GeneratedKeyName); + } + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(SourceContext)) + { + writer.WritePropertyName("sourceContext"u8); + writer.WriteStringValue(SourceContext); + } + if (Optional.IsCollectionDefined(Inputs)) + { + writer.WritePropertyName("inputs"u8); + writer.WriteStartArray(); + foreach (var item in Inputs) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector.DeserializeKnowledgeStoreProjectionSelector(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector DeserializeKnowledgeStoreProjectionSelector(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string referenceKeyName = default; + string generatedKeyName = default; + string source = default; + string sourceContext = default; + IList inputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("referenceKeyName"u8)) + { + referenceKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("generatedKeyName"u8)) + { + generatedKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("source"u8)) + { + source = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceContext"u8)) + { + sourceContext = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector( + referenceKeyName, + generatedKeyName, + source, + sourceContext, + inputs ?? new ChangeTrackingList(), + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector.DeserializeKnowledgeStoreProjectionSelector(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector.DeserializeKnowledgeStoreProjectionSelector(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjectionSelector.cs new file mode 100644 index 000000000000..ccff3b993ea5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreProjectionSelector.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Abstract class to share properties between concrete selectors. + public partial class KnowledgeStoreProjectionSelector + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public KnowledgeStoreProjectionSelector() + { + Inputs = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Name of reference key to different projection. + /// Name of generated key to store projection under. + /// Source data to project. + /// Source context for complex projections. + /// Nested inputs for complex projections. + /// Keeps track of any properties unknown to the library. 
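// Example (sketch, not part of this diff): the serializedAdditionalRawData
// dictionaries above carry raw JSON for properties the library does not model,
// stored as BinaryData. The helpers referenced by the doc comments behave as
// follows; the values are the same illustrative ones those comments use.
using System;

BinaryData a = BinaryData.FromObjectAsJson("foo");                 // payload: "foo"
BinaryData b = BinaryData.FromString("\"foo\"");                   // payload: "foo"
BinaryData c = BinaryData.FromObjectAsJson(new { key = "value" }); // payload: {"key":"value"}
BinaryData d = BinaryData.FromString("{\"key\": \"value\"}");      // payload: {"key": "value"}

Console.WriteLine(c.ToString()); // {"key":"value"}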
+ internal KnowledgeStoreProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, IDictionary serializedAdditionalRawData) + { + ReferenceKeyName = referenceKeyName; + GeneratedKeyName = generatedKeyName; + Source = source; + SourceContext = sourceContext; + Inputs = inputs; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Name of reference key to different projection. + public string ReferenceKeyName { get; set; } + /// Name of generated key to store projection under. + public string GeneratedKeyName { get; set; } + /// Source data to project. + public string Source { get; set; } + /// Source context for complex projections. + public string SourceContext { get; set; } + /// Nested inputs for complex projections. + public IList Inputs { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreStorageProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreStorageProjectionSelector.Serialization.cs new file mode 100644 index 000000000000..9c8289714338 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreStorageProjectionSelector.Serialization.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KnowledgeStoreStorageProjectionSelector : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("storageContainer"u8); + writer.WriteStringValue(StorageContainer); + } + + Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector.DeserializeKnowledgeStoreStorageProjectionSelector(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector DeserializeKnowledgeStoreStorageProjectionSelector(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string storageContainer = default; + string referenceKeyName = default; + string generatedKeyName = default; + string source = default; + string sourceContext = default; + IList inputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("storageContainer"u8)) + { + storageContainer = property.Value.GetString(); + continue; + } + if (property.NameEquals("referenceKeyName"u8)) + { + referenceKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("generatedKeyName"u8)) + { + generatedKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("source"u8)) + { + source = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceContext"u8)) + { + sourceContext = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector( + referenceKeyName, + generatedKeyName, + source, + sourceContext, + inputs ?? new ChangeTrackingList(), + serializedAdditionalRawData, + storageContainer); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector.DeserializeKnowledgeStoreStorageProjectionSelector(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreStorageProjectionSelector.DeserializeKnowledgeStoreStorageProjectionSelector(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreStorageProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreStorageProjectionSelector.cs new file mode 100644 index 000000000000..70ba645a9610 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreStorageProjectionSelector.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Abstract class to share properties between concrete selectors. + public partial class KnowledgeStoreStorageProjectionSelector : Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector + { + /// Initializes a new instance of . + /// Blob container to store projections in. + /// is null. + public KnowledgeStoreStorageProjectionSelector(string storageContainer) + { + Argument.AssertNotNull(storageContainer, nameof(storageContainer)); + + StorageContainer = storageContainer; + } + + /// Initializes a new instance of . + /// Name of reference key to different projection. + /// Name of generated key to store projection under. + /// Source data to project. + /// Source context for complex projections. + /// Nested inputs for complex projections. + /// Keeps track of any properties unknown to the library. + /// Blob container to store projections in. + internal KnowledgeStoreStorageProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, IDictionary serializedAdditionalRawData, string storageContainer) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs, serializedAdditionalRawData) + { + StorageContainer = storageContainer; + } + + /// Initializes a new instance of for deserialization. + internal KnowledgeStoreStorageProjectionSelector() + { + } + + /// Blob container to store projections in. 
+ public string StorageContainer { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreTableProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreTableProjectionSelector.Serialization.cs new file mode 100644 index 000000000000..710a68f4fbe6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreTableProjectionSelector.Serialization.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class KnowledgeStoreTableProjectionSelector : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("tableName"u8); + writer.WriteStringValue(TableName); + } + + Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector.DeserializeKnowledgeStoreTableProjectionSelector(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector DeserializeKnowledgeStoreTableProjectionSelector(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string tableName = default; + string referenceKeyName = default; + string generatedKeyName = default; + string source = default; + string sourceContext = default; + IList inputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tableName"u8)) + { + tableName = property.Value.GetString(); + continue; + } + if (property.NameEquals("referenceKeyName"u8)) + { + referenceKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("generatedKeyName"u8)) + { + generatedKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("source"u8)) + { + source = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceContext"u8)) + { + sourceContext = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector( + referenceKeyName, + generatedKeyName, + source, + sourceContext, + inputs ?? new ChangeTrackingList(), + serializedAdditionalRawData, + tableName); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector.DeserializeKnowledgeStoreTableProjectionSelector(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.KnowledgeStoreTableProjectionSelector.DeserializeKnowledgeStoreTableProjectionSelector(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreTableProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreTableProjectionSelector.cs new file mode 100644 index 000000000000..626965fd2ec0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/KnowledgeStoreTableProjectionSelector.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Description for what data to store in Azure Tables. + public partial class KnowledgeStoreTableProjectionSelector : Search.Documents.Indexes.Models.KnowledgeStoreProjectionSelector + { + /// Initializes a new instance of . + /// Name of the Azure table to store projected data in. + /// is null. + public KnowledgeStoreTableProjectionSelector(string tableName) + { + Argument.AssertNotNull(tableName, nameof(tableName)); + + TableName = tableName; + } + + /// Initializes a new instance of . + /// Name of reference key to different projection. + /// Name of generated key to store projection under. + /// Source data to project. + /// Source context for complex projections. + /// Nested inputs for complex projections. + /// Keeps track of any properties unknown to the library. + /// Name of the Azure table to store projected data in. + internal KnowledgeStoreTableProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, IDictionary serializedAdditionalRawData, string tableName) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs, serializedAdditionalRawData) + { + TableName = tableName; + } + + /// Initializes a new instance of for deserialization. + internal KnowledgeStoreTableProjectionSelector() + { + } + + /// Name of the Azure table to store projected data in. 
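// Example (sketch, not part of this diff): the wire shape produced for a table
// selector, based on the property names written by the serializer above
// (generatedKeyName, source, tableName, ...). Values are placeholders and the
// exact property order may differ.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents.Indexes.Models;

var table = new KnowledgeStoreTableProjectionSelector("keyPhrasesTable")
{
    GeneratedKeyName = "DocumentId",
    Source = "/document/tableProjection"
};

BinaryData wire = ModelReaderWriter.Write(table, ModelReaderWriterOptions.Json);
Console.WriteLine(wire.ToString());
// {"generatedKeyName":"DocumentId","source":"/document/tableProjection","tableName":"keyPhrasesTable"}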
+ public string TableName { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LanguageDetectionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LanguageDetectionSkill.Serialization.cs new file mode 100644 index 000000000000..3fbdb86f8b35 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LanguageDetectionSkill.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class LanguageDetectionSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LanguageDetectionSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultCountryHint)) + { + writer.WritePropertyName("defaultCountryHint"u8); + writer.WriteStringValue(DefaultCountryHint); + } + if (Optional.IsDefined(ModelVersion)) + { + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + } + } + + LanguageDetectionSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LanguageDetectionSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLanguageDetectionSkill(document.RootElement, options); + } + + internal static LanguageDetectionSkill DeserializeLanguageDetectionSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string defaultCountryHint = default; + string modelVersion = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultCountryHint"u8)) + { + defaultCountryHint = property.Value.GetString(); + continue; + } + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LanguageDetectionSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultCountryHint, + modelVersion); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LanguageDetectionSkill)} does not support writing '{options.Format}' format."); + } + } + + LanguageDetectionSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLanguageDetectionSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LanguageDetectionSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new LanguageDetectionSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLanguageDetectionSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LanguageDetectionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/LanguageDetectionSkill.cs new file mode 100644 index 000000000000..a970bb79b723 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LanguageDetectionSkill.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A skill that detects the language of input text and reports a single language + /// code for every document submitted on the request. The language code is paired + /// with a score indicating the confidence of the analysis. + /// + public partial class LanguageDetectionSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public LanguageDetectionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.LanguageDetectionSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. 
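// Example (sketch, not part of this diff): constructing the language detection
// skill declared above. The "text" input and "languageCode" output names follow
// the usual skillset conventions, and the InputFieldMappingEntry and
// OutputFieldMappingEntry constructors are assumed from the wider SDK rather
// than taken from this diff; the country hint and model version are placeholders.
using Azure.Search.Documents; // namespace as generated in this diff

var skill = new LanguageDetectionSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("languageCode") { TargetName = "language" } })
{
    DefaultCountryHint = "us",
    ModelVersion = "latest"
};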
+ /// + /// Keeps track of any properties unknown to the library. + /// + /// A country code to use as a hint to the language detection model if it cannot + /// disambiguate the language. + /// + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + internal LanguageDetectionSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string defaultCountryHint, string modelVersion) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultCountryHint = defaultCountryHint; + ModelVersion = modelVersion; + } + + /// Initializes a new instance of for deserialization. + internal LanguageDetectionSkill() + { + } + + /// + /// A country code to use as a hint to the language detection model if it cannot + /// disambiguate the language. + /// + public string DefaultCountryHint { get; set; } + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + public string ModelVersion { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LengthTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LengthTokenFilter.Serialization.cs new file mode 100644 index 000000000000..e7b3c08c50ae --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LengthTokenFilter.Serialization.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class LengthTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LengthTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MinLength)) + { + writer.WritePropertyName("min"u8); + writer.WriteNumberValue(MinLength.Value); + } + if (Optional.IsDefined(MaxLength)) + { + writer.WritePropertyName("max"u8); + writer.WriteNumberValue(MaxLength.Value); + } + } + + LengthTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LengthTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLengthTokenFilter(document.RootElement, options); + } + + internal static LengthTokenFilter DeserializeLengthTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? min = default; + int? max = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("min"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + min = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("max"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + max = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LengthTokenFilter(odataType, name, serializedAdditionalRawData, min, max); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LengthTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + LengthTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLengthTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LengthTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new LengthTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLengthTokenFilter(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LengthTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/LengthTokenFilter.cs new file mode 100644 index 000000000000..cfa141b83b63 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LengthTokenFilter.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Removes words that are too long or too short. This token filter is implemented + /// using Apache Lucene. + /// + public partial class LengthTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public LengthTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.LengthTokenFilter"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The minimum length in characters. Default is 0. Maximum is 300. Must be less + /// than the value of max. + /// + /// The maximum length in characters. Default and maximum is 300. + internal LengthTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, int? minLength, int? maxLength) : base(odataType, name, serializedAdditionalRawData) + { + MinLength = minLength; + MaxLength = maxLength; + } + + /// Initializes a new instance of for deserialization. + internal LengthTokenFilter() + { + } + + /// + /// The minimum length in characters. Default is 0. Maximum is 300. Must be less + /// than the value of max. + /// + public int? MinLength { get; set; } + /// The maximum length in characters. Default and maximum is 300. + public int? MaxLength { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzer.Serialization.cs new file mode 100644 index 000000000000..3ddeb55bf94c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzer.Serialization.cs @@ -0,0 +1,138 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
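// Example (sketch, not part of this diff): a length token filter that keeps
// tokens between 2 and 50 characters. The filter name is a placeholder; in an
// index definition the filter is referenced by this name from an analyzer.
using Azure.Search.Documents; // namespace as generated in this diff

var filter = new LengthTokenFilter("length_2_50")
{
    MinLength = 2,
    MaxLength = 50
};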
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownLexicalAnalyzer))] + public partial class LexicalAnalyzer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + LexicalAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLexicalAnalyzer(document.RootElement, options); + } + + internal static LexicalAnalyzer DeserializeLexicalAnalyzer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.CustomAnalyzer": return CustomAnalyzer.DeserializeCustomAnalyzer(element, options); + case "#Microsoft.Azure.Search.PatternAnalyzer": return PatternAnalyzer.DeserializePatternAnalyzer(element, options); + case "#Microsoft.Azure.Search.StandardAnalyzer": return LuceneStandardAnalyzer.DeserializeLuceneStandardAnalyzer(element, options); + case "#Microsoft.Azure.Search.StopAnalyzer": return StopAnalyzer.DeserializeStopAnalyzer(element, options); + } + } + return UnknownLexicalAnalyzer.DeserializeUnknownLexicalAnalyzer(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + LexicalAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LexicalAnalyzer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalAnalyzer(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzer.cs new file mode 100644 index 000000000000..ab840e8796de --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzer.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for analyzers. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public abstract partial class LexicalAnalyzer + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. 
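Editor's note: DeserializeLexicalAnalyzer dispatches on the "@odata.type" discriminator, and the PersistableModelProxy attribute lets ModelReaderWriter materialize the abstract base, falling back to UnknownLexicalAnalyzer when nothing matches. A sketch under assumptions (the payload below is invented, and the concrete analyzer types are assumed to be visible to the caller):

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

// Invented payload shaped like a pattern analyzer definition.
var json = BinaryData.FromString(
    "{\"@odata.type\":\"#Microsoft.Azure.Search.PatternAnalyzer\",\"name\":\"my-pattern\"}");

LexicalAnalyzer analyzer = ModelReaderWriter.Read<LexicalAnalyzer>(json);
Console.WriteLine(analyzer is PatternAnalyzer); // True, per the discriminator switch above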
+ protected LexicalAnalyzer(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal LexicalAnalyzer(string odataType, string name, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + Name = name; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LexicalAnalyzer() + { + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + public string Name { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzerName.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzerName.cs similarity index 93% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzerName.cs rename to sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzerName.cs index 52e8ebb04daf..a020b7f4e666 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzerName.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalAnalyzerName.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Defines the names of all text analyzers supported by the search engine. public readonly partial struct LexicalAnalyzerName : IEquatable @@ -290,17 +290,37 @@ public LexicalAnalyzerName(string value) public static LexicalAnalyzerName ViMicrosoft { get; } = new LexicalAnalyzerName(ViMicrosoftValue); /// Standard Lucene analyzer. public static LexicalAnalyzerName StandardLucene { get; } = new LexicalAnalyzerName(StandardLuceneValue); - /// Standard ASCII Folding Lucene analyzer. See https://learn.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#Analyzers. + /// + /// Standard ASCII Folding Lucene analyzer. See + /// https://learn.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#Analyzers + /// public static LexicalAnalyzerName StandardAsciiFoldingLucene { get; } = new LexicalAnalyzerName(StandardAsciiFoldingLuceneValue); - /// Treats the entire content of a field as a single token. This is useful for data like zip codes, ids, and some product names. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordAnalyzer.html. + /// + /// Treats the entire content of a field as a single token. This is useful for data + /// like zip codes, ids, and some product names. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordAnalyzer.html + /// public static LexicalAnalyzerName Keyword { get; } = new LexicalAnalyzerName(KeywordValue); - /// Flexibly separates text into terms via a regular expression pattern. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/PatternAnalyzer.html. 
+ /// + /// Flexibly separates text into terms via a regular expression pattern. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/PatternAnalyzer.html + /// public static LexicalAnalyzerName Pattern { get; } = new LexicalAnalyzerName(PatternValue); - /// Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/SimpleAnalyzer.html. + /// + /// Divides text at non-letters and converts them to lower case. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/SimpleAnalyzer.html + /// public static LexicalAnalyzerName Simple { get; } = new LexicalAnalyzerName(SimpleValue); - /// Divides text at non-letters; Applies the lowercase and stopword token filters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopAnalyzer.html. + /// + /// Divides text at non-letters; Applies the lowercase and stopword token filters. + /// See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopAnalyzer.html + /// public static LexicalAnalyzerName Stop { get; } = new LexicalAnalyzerName(StopValue); - /// An analyzer that uses the whitespace tokenizer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceAnalyzer.html. + /// + /// An analyzer that uses the whitespace tokenizer. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceAnalyzer.html + /// public static LexicalAnalyzerName Whitespace { get; } = new LexicalAnalyzerName(WhitespaceValue); /// Determines if two values are the same. public static bool operator ==(LexicalAnalyzerName left, LexicalAnalyzerName right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizer.Serialization.cs new file mode 100644 index 000000000000..abc256852c8c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizer.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownLexicalNormalizer))] + public partial class LexicalNormalizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + LexicalNormalizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLexicalNormalizer(document.RootElement, options); + } + + internal static LexicalNormalizer DeserializeLexicalNormalizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.CustomNormalizer": return CustomNormalizer.DeserializeCustomNormalizer(element, options); + } + } + return UnknownLexicalNormalizer.DeserializeUnknownLexicalNormalizer(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support writing '{options.Format}' format."); + } + } + + LexicalNormalizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalNormalizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LexicalNormalizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalNormalizer(document.RootElement); + } + + /// Convert into a . 
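Editor's note: the normalizer hierarchy only recognizes the CustomNormalizer discriminator; anything else falls back to UnknownLexicalNormalizer. That Unknown implementation is not part of this hunk, but the generated pattern normally stashes unmodeled properties in _serializedAdditionalRawData and re-emits them when the format is "J". A speculative sketch of that behaviour, with an invented discriminator and property:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

// "FutureNormalizer" and "newOption" are made up for illustration.
var json = BinaryData.FromString(
    "{\"@odata.type\":\"#Microsoft.Azure.Search.FutureNormalizer\",\"name\":\"n1\",\"newOption\":true}");

LexicalNormalizer normalizer = ModelReaderWriter.Read<LexicalNormalizer>(json);
BinaryData written = ModelReaderWriter.Write(normalizer);
Console.WriteLine(written.ToString().Contains("newOption")); // expected True: raw data round-trips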
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizer.cs new file mode 100644 index 000000000000..7286c6f7a314 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizer.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for normalizers. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include . + /// + public abstract partial class LexicalNormalizer + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + protected LexicalNormalizer(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal LexicalNormalizer(string odataType, string name, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + Name = name; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LexicalNormalizer() + { + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. 
+ /// + public string Name { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizerName.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizerName.cs new file mode 100644 index 000000000000..8cae1971d825 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalNormalizerName.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Defines the names of all text normalizers supported by the search engine. + public readonly partial struct LexicalNormalizerName : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public LexicalNormalizerName(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string AsciiFoldingValue = "asciifolding"; + private const string ElisionValue = "elision"; + private const string LowercaseValue = "lowercase"; + private const string StandardValue = "standard"; + private const string UppercaseValue = "uppercase"; + + /// + /// Converts alphabetic, numeric, and symbolic Unicode characters which are not in + /// the first 127 ASCII characters (the "Basic Latin" Unicode block) into their + /// ASCII equivalents, if such equivalents exist. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html + /// + public static LexicalNormalizerName AsciiFolding { get; } = new LexicalNormalizerName(AsciiFoldingValue); + /// + /// Removes elisions. For example, "l'avion" (the plane) will be converted to + /// "avion" (plane). See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html + /// + public static LexicalNormalizerName Elision { get; } = new LexicalNormalizerName(ElisionValue); + /// + /// Normalizes token text to lowercase. See + /// https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html + /// + public static LexicalNormalizerName Lowercase { get; } = new LexicalNormalizerName(LowercaseValue); + /// + /// Standard normalizer, which consists of lowercase and asciifolding. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html + /// + public static LexicalNormalizerName Standard { get; } = new LexicalNormalizerName(StandardValue); + /// + /// Normalizes token text to uppercase. See + /// https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html + /// + public static LexicalNormalizerName Uppercase { get; } = new LexicalNormalizerName(UppercaseValue); + /// Determines if two values are the same. + public static bool operator ==(LexicalNormalizerName left, LexicalNormalizerName right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(LexicalNormalizerName left, LexicalNormalizerName right) => !left.Equals(right); + /// Converts a to a . 
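Editor's note: LexicalNormalizerName follows the extensible-enum pattern used throughout these files: any string converts implicitly, and the equality defined just below is case-insensitive (InvariantCultureIgnoreCase). A small usage sketch:

using System;
using Azure.Search.Documents;

LexicalNormalizerName custom = "LOWERCASE";                   // implicit conversion from string
Console.WriteLine(custom == LexicalNormalizerName.Lowercase); // True: comparison ignores case
Console.WriteLine(LexicalNormalizerName.AsciiFolding);        // asciifolding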
+ public static implicit operator LexicalNormalizerName(string value) => new LexicalNormalizerName(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is LexicalNormalizerName other && Equals(other); + /// + public bool Equals(LexicalNormalizerName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizer.Serialization.cs new file mode 100644 index 000000000000..77ad1032ec98 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizer.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownLexicalTokenizer))] + public partial class LexicalTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + LexicalTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLexicalTokenizer(document.RootElement, options); + } + + internal static LexicalTokenizer DeserializeLexicalTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.ClassicTokenizer": return ClassicTokenizer.DeserializeClassicTokenizer(element, options); + case "#Microsoft.Azure.Search.EdgeNGramTokenizer": return EdgeNGramTokenizer.DeserializeEdgeNGramTokenizer(element, options); + case "#Microsoft.Azure.Search.KeywordTokenizerV2": return Search.Documents.Indexes.Models.KeywordTokenizer.DeserializeKeywordTokenizer(element, options); + case "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer": return MicrosoftLanguageStemmingTokenizer.DeserializeMicrosoftLanguageStemmingTokenizer(element, options); + case "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer": return MicrosoftLanguageTokenizer.DeserializeMicrosoftLanguageTokenizer(element, options); + case "#Microsoft.Azure.Search.NGramTokenizer": return NGramTokenizer.DeserializeNGramTokenizer(element, options); + case "#Microsoft.Azure.Search.PathHierarchyTokenizerV2": return Search.Documents.Indexes.Models.PathHierarchyTokenizer.DeserializePathHierarchyTokenizer(element, options); + case "#Microsoft.Azure.Search.PatternTokenizer": return PatternTokenizer.DeserializePatternTokenizer(element, options); + case "#Microsoft.Azure.Search.StandardTokenizerV2": return Search.Documents.Indexes.Models.LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(element, options); + case "#Microsoft.Azure.Search.UaxUrlEmailTokenizer": return UaxUrlEmailTokenizer.DeserializeUaxUrlEmailTokenizer(element, options); + } + } + return UnknownLexicalTokenizer.DeserializeUnknownLexicalTokenizer(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support writing '{options.Format}' format."); + } + } + + LexicalTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
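Editor's note: note the aliasing in the discriminator switch above: the service's versioned wire names map onto the unversioned model types in Azure.Search.Documents.Indexes.Models (KeywordTokenizerV2 to KeywordTokenizer, PathHierarchyTokenizerV2 to PathHierarchyTokenizer, StandardTokenizerV2 to LuceneStandardTokenizer). A sketch with an invented payload, assuming those types are visible to the caller:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;
using Azure.Search.Documents.Indexes.Models;

var json = BinaryData.FromString(
    "{\"@odata.type\":\"#Microsoft.Azure.Search.StandardTokenizerV2\",\"name\":\"std\"}");

LexicalTokenizer tokenizer = ModelReaderWriter.Read<LexicalTokenizer>(json);
Console.WriteLine(tokenizer is LuceneStandardTokenizer); // True, despite the V2 wire name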
+ internal static LexicalTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalTokenizer(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizer.cs new file mode 100644 index 000000000000..f47c5457f170 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizer.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// Base type for tokenizers. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , and . + /// + public abstract partial class LexicalTokenizer + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + protected LexicalTokenizer(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal LexicalTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + Name = name; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LexicalTokenizer() + { + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. 
+ /// + public string Name { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizerName.cs b/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizerName.cs new file mode 100644 index 000000000000..3590a65cccd9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LexicalTokenizerName.cs @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Defines the names of all tokenizers supported by the search engine. + public readonly partial struct LexicalTokenizerName : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public LexicalTokenizerName(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string ClassicValue = "classic"; + private const string EdgeNGramValue = "edgeNGram"; + private const string KeywordValue = "keyword_v2"; + private const string LetterValue = "letter"; + private const string LowercaseValue = "lowercase"; + private const string MicrosoftLanguageTokenizerValue = "microsoft_language_tokenizer"; + private const string MicrosoftLanguageStemmingTokenizerValue = "microsoft_language_stemming_tokenizer"; + private const string NGramValue = "nGram"; + private const string PathHierarchyValue = "path_hierarchy_v2"; + private const string PatternValue = "pattern"; + private const string StandardValue = "standard_v2"; + private const string UaxUrlEmailValue = "uax_url_email"; + private const string WhitespaceValue = "whitespace"; + + /// + /// Grammar-based tokenizer that is suitable for processing most European-language + /// documents. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicTokenizer.html + /// + public static LexicalTokenizerName Classic { get; } = new LexicalTokenizerName(ClassicValue); + /// + /// Tokenizes the input from an edge into n-grams of the given size(s). See + /// https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenizer.html + /// + public static LexicalTokenizerName EdgeNGram { get; } = new LexicalTokenizerName(EdgeNGramValue); + /// + /// Emits the entire input as a single token. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordTokenizer.html + /// + public static LexicalTokenizerName Keyword { get; } = new LexicalTokenizerName(KeywordValue); + /// + /// Divides text at non-letters. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LetterTokenizer.html + /// + public static LexicalTokenizerName Letter { get; } = new LexicalTokenizerName(LetterValue); + /// + /// Divides text at non-letters and converts them to lower case. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LowerCaseTokenizer.html + /// + public static LexicalTokenizerName Lowercase { get; } = new LexicalTokenizerName(LowercaseValue); + /// Divides text using language-specific rules. + public static LexicalTokenizerName MicrosoftLanguageTokenizer { get; } = new LexicalTokenizerName(MicrosoftLanguageTokenizerValue); + /// + /// Divides text using language-specific rules and reduces words to their base + /// forms. 
+ /// + public static LexicalTokenizerName MicrosoftLanguageStemmingTokenizer { get; } = new LexicalTokenizerName(MicrosoftLanguageStemmingTokenizerValue); + /// + /// Tokenizes the input into n-grams of the given size(s). See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenizer.html + /// + public static LexicalTokenizerName NGram { get; } = new LexicalTokenizerName(NGramValue); + /// + /// Tokenizer for path-like hierarchies. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/path/PathHierarchyTokenizer.html + /// + public static LexicalTokenizerName PathHierarchy { get; } = new LexicalTokenizerName(PathHierarchyValue); + /// + /// Tokenizer that uses regex pattern matching to construct distinct tokens. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/pattern/PatternTokenizer.html + /// + public static LexicalTokenizerName Pattern { get; } = new LexicalTokenizerName(PatternValue); + /// + /// Standard Lucene analyzer; Composed of the standard tokenizer, lowercase filter + /// and stop filter. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/StandardTokenizer.html + /// + public static LexicalTokenizerName Standard { get; } = new LexicalTokenizerName(StandardValue); + /// + /// Tokenizes urls and emails as one token. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.html + /// + public static LexicalTokenizerName UaxUrlEmail { get; } = new LexicalTokenizerName(UaxUrlEmailValue); + /// + /// Divides text at whitespace. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceTokenizer.html + /// + public static LexicalTokenizerName Whitespace { get; } = new LexicalTokenizerName(WhitespaceValue); + /// Determines if two values are the same. + public static bool operator ==(LexicalTokenizerName left, LexicalTokenizerName right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(LexicalTokenizerName left, LexicalTokenizerName right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator LexicalTokenizerName(string value) => new LexicalTokenizerName(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is LexicalTokenizerName other && Equals(other); + /// + public bool Equals(LexicalTokenizerName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LimitTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LimitTokenFilter.Serialization.cs new file mode 100644 index 000000000000..d200270a70a9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LimitTokenFilter.Serialization.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
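Editor's note: as with the analyzer and normalizer names, LexicalTokenizerName wraps the raw wire strings, and several of the friendly names resolve to versioned values. For instance:

using System;
using Azure.Search.Documents;

Console.WriteLine(LexicalTokenizerName.Keyword);       // keyword_v2
Console.WriteLine(LexicalTokenizerName.PathHierarchy); // path_hierarchy_v2
Console.WriteLine(LexicalTokenizerName.Standard);      // standard_v2
Console.WriteLine(LexicalTokenizerName.EdgeNGram);     // edgeNGram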
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class LimitTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LimitTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenCount)) + { + writer.WritePropertyName("maxTokenCount"u8); + writer.WriteNumberValue(MaxTokenCount.Value); + } + if (Optional.IsDefined(ConsumeAllTokens)) + { + writer.WritePropertyName("consumeAllTokens"u8); + writer.WriteBooleanValue(ConsumeAllTokens.Value); + } + } + + LimitTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LimitTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLimitTokenFilter(document.RootElement, options); + } + + internal static LimitTokenFilter DeserializeLimitTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxTokenCount = default; + bool? consumeAllTokens = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenCount"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenCount = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("consumeAllTokens"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + consumeAllTokens = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LimitTokenFilter(odataType, name, serializedAdditionalRawData, maxTokenCount, consumeAllTokens); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LimitTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + LimitTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLimitTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LimitTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new LimitTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLimitTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LimitTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/LimitTokenFilter.cs new file mode 100644 index 000000000000..761e6624c118 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LimitTokenFilter.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Limits the number of tokens while indexing. This token filter is implemented + /// using Apache Lucene. + /// + public partial class LimitTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public LimitTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.LimitTokenFilter"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The maximum number of tokens to produce. Default is 1. + /// + /// A value indicating whether all tokens from the input must be consumed even if + /// maxTokenCount is reached. Default is false. + /// + internal LimitTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenCount, bool? 
consumeAllTokens) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenCount = maxTokenCount; + ConsumeAllTokens = consumeAllTokens; + } + + /// Initializes a new instance of for deserialization. + internal LimitTokenFilter() + { + } + + /// The maximum number of tokens to produce. Default is 1. + public int? MaxTokenCount { get; set; } + /// + /// A value indicating whether all tokens from the input must be consumed even if + /// maxTokenCount is reached. Default is false. + /// + public bool? ConsumeAllTokens { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListAliasesResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListAliasesResult.Serialization.cs new file mode 100644 index 000000000000..f077702750eb --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListAliasesResult.Serialization.cs @@ -0,0 +1,155 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class ListAliasesResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListAliasesResult)} does not support writing '{format}' format."); + } + + if (options.Format != "W") + { + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Aliases) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ListAliasesResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListAliasesResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListAliasesResult(document.RootElement, options); + } + + internal static ListAliasesResult DeserializeListAliasesResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchAlias.DeserializeSearchAlias(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListAliasesResult(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListAliasesResult)} does not support writing '{options.Format}' format."); + } + } + + ListAliasesResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListAliasesResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListAliasesResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ListAliasesResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListAliasesResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListAliasesResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListAliasesResult.cs new file mode 100644 index 000000000000..99b5c682121b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListAliasesResult.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Response from a List Aliases request. If successful, it includes the associated + /// index mappings for all aliases. + /// + internal partial class ListAliasesResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal ListAliasesResult() + { + Aliases = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The aliases in the Search service. + /// Keeps track of any properties unknown to the library. + internal ListAliasesResult(IReadOnlyList aliases, IDictionary serializedAdditionalRawData) + { + Aliases = aliases; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The aliases in the Search service. + public IReadOnlyList Aliases { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListDataSourcesResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListDataSourcesResult.Serialization.cs new file mode 100644 index 000000000000..92ea65cff799 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListDataSourcesResult.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + public partial class ListDataSourcesResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListDataSourcesResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in DataSources) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ListDataSourcesResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListDataSourcesResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListDataSourcesResult(document.RootElement, options); + } + + internal static ListDataSourcesResult DeserializeListDataSourcesResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListDataSourcesResult(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListDataSourcesResult)} does not support writing '{options.Format}' format."); + } + } + + ListDataSourcesResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListDataSourcesResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListDataSourcesResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. 
+ /// The response to deserialize the model from. + internal static ListDataSourcesResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListDataSourcesResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListDataSourcesResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListDataSourcesResult.cs new file mode 100644 index 000000000000..ec5225bf9666 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListDataSourcesResult.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// Response from a List Datasources request. If successful, it includes the full + /// definitions of all datasources. + /// + public partial class ListDataSourcesResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The datasources in the Search service. + /// is null. + internal ListDataSourcesResult(IEnumerable dataSources) + { + Argument.AssertNotNull(dataSources, nameof(dataSources)); + + DataSources = dataSources.ToList(); + } + + /// Initializes a new instance of . + /// The datasources in the Search service. + /// Keeps track of any properties unknown to the library. + internal ListDataSourcesResult(IReadOnlyList dataSources, IDictionary serializedAdditionalRawData) + { + DataSources = dataSources; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListDataSourcesResult() + { + } + + /// The datasources in the Search service. + public IReadOnlyList DataSources { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListIndexersResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexersResult.Serialization.cs new file mode 100644 index 000000000000..d753b01c4d55 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexersResult.Serialization.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
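Reviewer note (not part of the patch): the List*Result models in these hunks implement the IJsonModel/IPersistableModel pattern, so they can round-trip through System.ClientModel's ModelReaderWriter. A minimal sketch, assuming the reflection-based Read/Write overloads and the JSON ("J") wire format implemented above; the payload is a placeholder and type/namespace names follow this diff:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

// Placeholder wire payload shaped like the "value" envelope the serializer writes.
BinaryData json = BinaryData.FromString("{\"value\":[]}");

// Read goes through the IJsonModel<T> implementation added in this patch.
ListDataSourcesResult result = ModelReaderWriter.Read<ListDataSourcesResult>(json);

// Write round-trips the model; per the hunk above, unknown properties captured
// in _serializedAdditionalRawData are written back out for non-wire formats.
BinaryData roundTripped = ModelReaderWriter.Write(result);
Console.WriteLine(roundTripped.ToString());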
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ListIndexersResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListIndexersResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Indexers) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ListIndexersResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListIndexersResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListIndexersResult(document.RootElement, options); + } + + internal static ListIndexersResult DeserializeListIndexersResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchIndexer.DeserializeSearchIndexer(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListIndexersResult(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListIndexersResult)} does not support writing '{options.Format}' format."); + } + } + + ListIndexersResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListIndexersResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListIndexersResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ListIndexersResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListIndexersResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListIndexersResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexersResult.cs new file mode 100644 index 000000000000..965b903ae49a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexersResult.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Response from a List Indexers request. If successful, it includes the full + /// definitions of all indexers. + /// + public partial class ListIndexersResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The indexers in the Search service. + /// is null. + internal ListIndexersResult(IEnumerable indexers) + { + Argument.AssertNotNull(indexers, nameof(indexers)); + + Indexers = indexers.ToList(); + } + + /// Initializes a new instance of . + /// The indexers in the Search service. + /// Keeps track of any properties unknown to the library. 
+ internal ListIndexersResult(IReadOnlyList indexers, IDictionary serializedAdditionalRawData) + { + Indexers = indexers; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListIndexersResult() + { + } + + /// The indexers in the Search service. + public IReadOnlyList Indexers { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListIndexesResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexesResult.Serialization.cs new file mode 100644 index 000000000000..d2686b6be10d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexesResult.Serialization.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class ListIndexesResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListIndexesResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Indexes) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ListIndexesResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListIndexesResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListIndexesResult(document.RootElement, options); + } + + internal static ListIndexesResult DeserializeListIndexesResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchIndex.DeserializeSearchIndex(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListIndexesResult(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListIndexesResult)} does not support writing '{options.Format}' format."); + } + } + + ListIndexesResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListIndexesResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListIndexesResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ListIndexesResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListIndexesResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListIndexesResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexesResult.cs new file mode 100644 index 000000000000..e86ea31829c3 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListIndexesResult.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
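Reviewer note (not part of the patch): ListIndexesResult stays internal, so callers reach this data through the public listing API on SearchIndexClient rather than the model itself. A hedged sketch of that call path; the endpoint and key are placeholders, and the namespaces follow the currently shipped public surface:

using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexClient = new SearchIndexClient(
    new Uri("https://<service>.search.windows.net"),  // placeholder endpoint
    new AzureKeyCredential("<admin-api-key>"));       // placeholder key

// Pages through the index definitions; the internal ListIndexesResult above
// models the raw "value" envelope returned by the List Indexes request.
await foreach (SearchIndex index in indexClient.GetIndexesAsync())
{
    Console.WriteLine(index.Name);
}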
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Response from a List Indexes request. If successful, it includes the full + /// definitions of all indexes. + /// + internal partial class ListIndexesResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The indexes in the Search service. + /// is null. + internal ListIndexesResult(IEnumerable indexes) + { + Argument.AssertNotNull(indexes, nameof(indexes)); + + Indexes = indexes.ToList(); + } + + /// Initializes a new instance of . + /// The indexes in the Search service. + /// Keeps track of any properties unknown to the library. + internal ListIndexesResult(IReadOnlyList indexes, IDictionary serializedAdditionalRawData) + { + Indexes = indexes; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListIndexesResult() + { + } + + /// The indexes in the Search service. + public IReadOnlyList Indexes { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListSkillsetsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListSkillsetsResult.Serialization.cs new file mode 100644 index 000000000000..74b2146612d5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListSkillsetsResult.Serialization.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ListSkillsetsResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListSkillsetsResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Skillsets) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ListSkillsetsResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListSkillsetsResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListSkillsetsResult(document.RootElement, options); + } + + internal static ListSkillsetsResult DeserializeListSkillsetsResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchIndexerSkillset.DeserializeSearchIndexerSkillset(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListSkillsetsResult(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListSkillsetsResult)} does not support writing '{options.Format}' format."); + } + } + + ListSkillsetsResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListSkillsetsResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListSkillsetsResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static ListSkillsetsResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListSkillsetsResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListSkillsetsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListSkillsetsResult.cs new file mode 100644 index 000000000000..2f87992a680c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListSkillsetsResult.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Response from a list skillset request. If successful, it includes the full + /// definitions of all skillsets. + /// + public partial class ListSkillsetsResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The skillsets defined in the Search service. + /// is null. + internal ListSkillsetsResult(IEnumerable skillsets) + { + Argument.AssertNotNull(skillsets, nameof(skillsets)); + + Skillsets = skillsets.ToList(); + } + + /// Initializes a new instance of . + /// The skillsets defined in the Search service. + /// Keeps track of any properties unknown to the library. + internal ListSkillsetsResult(IReadOnlyList skillsets, IDictionary serializedAdditionalRawData) + { + Skillsets = skillsets; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListSkillsetsResult() + { + } + + /// The skillsets defined in the Search service. + public IReadOnlyList Skillsets { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListSynonymMapsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListSynonymMapsResult.Serialization.cs new file mode 100644 index 000000000000..f276fa827b82 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListSynonymMapsResult.Serialization.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
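Reviewer note (not part of the patch): as with the other list results, ListSkillsetsResult (above) and ListSynonymMapsResult (below) surface through the service clients rather than being constructed directly. A hedged sketch against SearchIndexerClient, with a placeholder endpoint and key and namespaces per the currently shipped public surface:

using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexerClient = new SearchIndexerClient(
    new Uri("https://<service>.search.windows.net"),  // placeholder endpoint
    new AzureKeyCredential("<admin-api-key>"));       // placeholder key

// The response carries the full skillset definitions, mirroring the "value"
// array that ListSkillsetsResult serializes in the hunk above.
var skillsets = await indexerClient.GetSkillsetsAsync();
foreach (SearchIndexerSkillset skillset in skillsets.Value)
{
    Console.WriteLine(skillset.Name);
}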
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ListSynonymMapsResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListSynonymMapsResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in SynonymMaps) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ListSynonymMapsResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListSynonymMapsResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListSynonymMapsResult(document.RootElement, options); + } + + internal static ListSynonymMapsResult DeserializeListSynonymMapsResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SynonymMap.DeserializeSynonymMap(item, options)); + } + value = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListSynonymMapsResult(value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListSynonymMapsResult)} does not support writing '{options.Format}' format."); + } + } + + ListSynonymMapsResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListSynonymMapsResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListSynonymMapsResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ListSynonymMapsResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeListSynonymMapsResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ListSynonymMapsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/ListSynonymMapsResult.cs new file mode 100644 index 000000000000..dea88c576a5d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ListSynonymMapsResult.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Response from a List SynonymMaps request. If successful, it includes the full + /// definitions of all synonym maps. + /// + public partial class ListSynonymMapsResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The synonym maps in the Search service. + /// is null. + internal ListSynonymMapsResult(IEnumerable synonymMaps) + { + Argument.AssertNotNull(synonymMaps, nameof(synonymMaps)); + + SynonymMaps = synonymMaps.ToList(); + } + + /// Initializes a new instance of . + /// The synonym maps in the Search service. + /// Keeps track of any properties unknown to the library. 
+ internal ListSynonymMapsResult(IReadOnlyList synonymMaps, IDictionary serializedAdditionalRawData) + { + SynonymMaps = synonymMaps; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListSynonymMapsResult() + { + } + + /// The synonym maps in the Search service. + public IReadOnlyList SynonymMaps { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LookupDocument.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LookupDocument.Serialization.cs new file mode 100644 index 000000000000..5cc33c70cae1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LookupDocument.Serialization.cs @@ -0,0 +1,128 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class LookupDocument : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LookupDocument)} does not support writing '{format}' format."); + } + + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + LookupDocument IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LookupDocument)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLookupDocument(document.RootElement, options); + } + + internal static LookupDocument DeserializeLookupDocument(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new LookupDocument(additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LookupDocument)} does not support writing '{options.Format}' format."); + } + } + + LookupDocument IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLookupDocument(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LookupDocument)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LookupDocument FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLookupDocument(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LookupDocument.cs b/sdk/search/Azure.Search.Documents/src/Generated/LookupDocument.cs new file mode 100644 index 000000000000..26cafcaff59b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LookupDocument.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A document retrieved via a document lookup operation. + public partial class LookupDocument + { + /// Initializes a new instance of . + internal LookupDocument() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Additional Properties. + internal LookupDocument(IReadOnlyDictionary additionalProperties) + { + AdditionalProperties = additionalProperties; + } + + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// + /// + public IReadOnlyDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardAnalyzer.Serialization.cs new file mode 100644 index 000000000000..9e15f9d940ea --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardAnalyzer.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class LuceneStandardAnalyzer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LuceneStandardAnalyzer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + if (Optional.IsCollectionDefined(Stopwords)) + { + writer.WritePropertyName("stopwords"u8); + writer.WriteStartArray(); + foreach (var item in Stopwords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + } + + LuceneStandardAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LuceneStandardAnalyzer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLuceneStandardAnalyzer(document.RootElement, options); + } + + internal static LuceneStandardAnalyzer DeserializeLuceneStandardAnalyzer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? 
maxTokenLength = default; + IList stopwords = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("stopwords"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + stopwords = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LuceneStandardAnalyzer(odataType, name, serializedAdditionalRawData, maxTokenLength, stopwords ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LuceneStandardAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + LuceneStandardAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLuceneStandardAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LuceneStandardAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new LuceneStandardAnalyzer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLuceneStandardAnalyzer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardAnalyzer.cs new file mode 100644 index 000000000000..e89e96b5c0a7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardAnalyzer.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
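Reviewer note (not part of the patch): LuceneStandardAnalyzer keeps its public name-taking constructor, with MaxTokenLength and Stopwords as shown in the following hunk. A hedged sketch of attaching one to an index definition; the index, field, and analyzer names are placeholders, and the namespaces follow the currently shipped public surface rather than the regenerated internals:

using Azure.Search.Documents.Indexes.Models;

var index = new SearchIndex("hotels-sample")          // placeholder index name
{
    Fields =
    {
        // Point the field at the custom analyzer by name.
        new SearchableField("description") { AnalyzerName = "my-standard" }
    },
    Analyzers =
    {
        new LuceneStandardAnalyzer("my-standard")
        {
            MaxTokenLength = 255,                      // default per the doc comment; max is 300
            Stopwords = { "the", "and" }               // placeholder stopword list
        }
    }
};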
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Standard Apache Lucene analyzer; Composed of the standard tokenizer, lowercase + /// filter and stop filter. + /// + public partial class LuceneStandardAnalyzer : LexicalAnalyzer + { + /// Initializes a new instance of . + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public LuceneStandardAnalyzer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.StandardAnalyzer"; + Stopwords = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The maximum token length. Default is 255. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + /// A list of stopwords. + internal LuceneStandardAnalyzer(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenLength, IList stopwords) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenLength = maxTokenLength; + Stopwords = stopwords; + } + + /// Initializes a new instance of for deserialization. + internal LuceneStandardAnalyzer() + { + } + + /// + /// The maximum token length. Default is 255. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + public int? MaxTokenLength { get; set; } + /// A list of stopwords. + public IList Stopwords { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardTokenizer.Serialization.cs new file mode 100644 index 000000000000..a62fc0b85904 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardTokenizer.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class LuceneStandardTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.LuceneStandardTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + } + + Search.Documents.Indexes.Models.LuceneStandardTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.LuceneStandardTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.LuceneStandardTokenizer DeserializeLuceneStandardTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxTokenLength = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.LuceneStandardTokenizer(odataType, name, serializedAdditionalRawData, maxTokenLength); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.LuceneStandardTokenizer)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.LuceneStandardTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.LuceneStandardTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.LuceneStandardTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardTokenizer.cs new file mode 100644 index 000000000000..0bb8ac9fa286 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/LuceneStandardTokenizer.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Breaks text following the Unicode Text Segmentation rules. This tokenizer is + /// implemented using Apache Lucene. + /// + public partial class LuceneStandardTokenizer : Search.Documents.LexicalTokenizer + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The maximum token length. Default is 255. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + internal LuceneStandardTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenLength) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenLength = maxTokenLength; + } + + /// Initializes a new instance of for deserialization. + internal LuceneStandardTokenizer() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringFunction.Serialization.cs new file mode 100644 index 000000000000..f8e6a018c1da --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringFunction.Serialization.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
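Reviewer note (not part of the patch): the next hunks add MagnitudeScoringFunction and its MagnitudeScoringParameters. A hedged sketch of wiring them into a scoring profile, following the public constructor shape shown in those hunks; field and profile names are placeholders, and the namespaces follow the currently shipped public surface:

using Azure.Search.Documents.Indexes.Models;

var profile = new ScoringProfile("boost-by-rating")   // placeholder profile name
{
    Functions =
    {
        // Boost documents whose "rating" value falls in the 0..5 range.
        new MagnitudeScoringFunction(
            "rating",                                  // fieldName
            2.0,                                       // boost: positive, not equal to 1.0
            new MagnitudeScoringParameters(0, 5)       // boostingRangeStart, boostingRangeEnd
            {
                ShouldBoostBeyondRangeByConstant = true
            })
        {
            Interpolation = ScoringFunctionInterpolation.Linear
        }
    }
};

// The profile would then be added to a SearchIndex's ScoringProfiles collection.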
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class MagnitudeScoringFunction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MagnitudeScoringFunction)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("magnitude"u8); + writer.WriteObjectValue(Parameters, options); + } + + MagnitudeScoringFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MagnitudeScoringFunction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMagnitudeScoringFunction(document.RootElement, options); + } + + internal static MagnitudeScoringFunction DeserializeMagnitudeScoringFunction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + MagnitudeScoringParameters magnitude = default; + string fieldName = default; + double boost = default; + ScoringFunctionInterpolation? interpolation = default; + string type = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("magnitude"u8)) + { + magnitude = MagnitudeScoringParameters.DeserializeMagnitudeScoringParameters(property.Value, options); + continue; + } + if (property.NameEquals("fieldName"u8)) + { + fieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("boost"u8)) + { + boost = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("interpolation"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + interpolation = new ScoringFunctionInterpolation(property.Value.GetString()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new MagnitudeScoringFunction( + fieldName, + boost, + interpolation, + type, + serializedAdditionalRawData, + magnitude); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(MagnitudeScoringFunction)} does not support writing '{options.Format}' format."); + } + } + + MagnitudeScoringFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMagnitudeScoringFunction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MagnitudeScoringFunction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new MagnitudeScoringFunction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMagnitudeScoringFunction(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringFunction.cs new file mode 100644 index 000000000000..0d1f8fb16e0c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringFunction.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Defines a function that boosts scores based on the magnitude of a numeric field. + public partial class MagnitudeScoringFunction : ScoringFunction + { + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// Parameter values for the magnitude scoring function. + /// or is null. + public MagnitudeScoringFunction(string fieldName, double boost, MagnitudeScoringParameters parameters) : base(fieldName, boost) + { + Argument.AssertNotNull(fieldName, nameof(fieldName)); + Argument.AssertNotNull(parameters, nameof(parameters)); + + Type = "magnitude"; + Parameters = parameters; + } + + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// + /// A value indicating how boosting will be interpolated across document scores; + /// defaults to "Linear". + /// + /// Type of ScoringFunction. + /// Keeps track of any properties unknown to the library. + /// Parameter values for the magnitude scoring function. + internal MagnitudeScoringFunction(string fieldName, double boost, ScoringFunctionInterpolation? 
interpolation, string type, IDictionary serializedAdditionalRawData, MagnitudeScoringParameters parameters) : base(fieldName, boost, interpolation, type, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal MagnitudeScoringFunction() + { + } + + /// Parameter values for the magnitude scoring function. + public MagnitudeScoringParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringParameters.Serialization.cs new file mode 100644 index 000000000000..fbcd10d3de40 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringParameters.Serialization.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class MagnitudeScoringParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MagnitudeScoringParameters)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("boostingRangeStart"u8); + writer.WriteNumberValue(BoostingRangeStart); + writer.WritePropertyName("boostingRangeEnd"u8); + writer.WriteNumberValue(BoostingRangeEnd); + if (Optional.IsDefined(ShouldBoostBeyondRangeByConstant)) + { + writer.WritePropertyName("constantBoostBeyondRange"u8); + writer.WriteBooleanValue(ShouldBoostBeyondRangeByConstant.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + MagnitudeScoringParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MagnitudeScoringParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMagnitudeScoringParameters(document.RootElement, options); + } + + internal static MagnitudeScoringParameters DeserializeMagnitudeScoringParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double boostingRangeStart = default; + double boostingRangeEnd = default; + bool? constantBoostBeyondRange = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("boostingRangeStart"u8)) + { + boostingRangeStart = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("boostingRangeEnd"u8)) + { + boostingRangeEnd = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("constantBoostBeyondRange"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + constantBoostBeyondRange = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new MagnitudeScoringParameters(boostingRangeStart, boostingRangeEnd, constantBoostBeyondRange, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(MagnitudeScoringParameters)} does not support writing '{options.Format}' format."); + } + } + + MagnitudeScoringParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMagnitudeScoringParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MagnitudeScoringParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static MagnitudeScoringParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMagnitudeScoringParameters(document.RootElement); + } + + /// Convert into a . 
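// ---- Illustrative usage sketch (not part of the generated code) ----
// The serialization members in this file implement the System.ClientModel IJsonModel /
// IPersistableModel pattern, which only accepts the JSON ("J") wire format. A minimal
// round-trip sketch, assuming the public MagnitudeScoringParameters and
// MagnitudeScoringFunction constructors shown in this diff and the ModelReaderWriter helper
// from System.ClientModel.Primitives; the "rating" field and the numeric values are
// hypothetical:
//
//     using System.ClientModel.Primitives;
//
//     // Boost documents whose "rating" value falls between 3.5 and 5.0.
//     var parameters = new MagnitudeScoringParameters(boostingRangeStart: 3.5, boostingRangeEnd: 5.0)
//     {
//         ShouldBoostBeyondRangeByConstant = true
//     };
//     var function = new MagnitudeScoringFunction("rating", boost: 2.0, parameters);
//
//     // Round-trip through the "J" (JSON) persistable-model format.
//     BinaryData json = ModelReaderWriter.Write(function);
//     MagnitudeScoringFunction roundTripped = ModelReaderWriter.Read<MagnitudeScoringFunction>(json);
//
//     // In an index definition, the function would typically be added to a scoring
//     // profile's Functions collection rather than serialized directly.
// ---------------------------------------------------------------------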
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringParameters.cs new file mode 100644 index 000000000000..f13600224fe7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MagnitudeScoringParameters.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Provides parameter values to a magnitude scoring function. + public partial class MagnitudeScoringParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The field value at which boosting starts. + /// The field value at which boosting ends. + public MagnitudeScoringParameters(double boostingRangeStart, double boostingRangeEnd) + { + BoostingRangeStart = boostingRangeStart; + BoostingRangeEnd = boostingRangeEnd; + } + + /// Initializes a new instance of . + /// The field value at which boosting starts. + /// The field value at which boosting ends. + /// + /// A value indicating whether to apply a constant boost for field values beyond + /// the range end value; default is false. + /// + /// Keeps track of any properties unknown to the library. + internal MagnitudeScoringParameters(double boostingRangeStart, double boostingRangeEnd, bool? shouldBoostBeyondRangeByConstant, IDictionary serializedAdditionalRawData) + { + BoostingRangeStart = boostingRangeStart; + BoostingRangeEnd = boostingRangeEnd; + ShouldBoostBeyondRangeByConstant = shouldBoostBeyondRangeByConstant; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal MagnitudeScoringParameters() + { + } + + /// The field value at which boosting starts. + public double BoostingRangeStart { get; set; } + /// The field value at which boosting ends. + public double BoostingRangeEnd { get; set; } + /// + /// A value indicating whether to apply a constant boost for field values beyond + /// the range end value; default is false. + /// + public bool? 
ShouldBoostBeyondRangeByConstant { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MappingCharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/MappingCharFilter.Serialization.cs new file mode 100644 index 000000000000..fa01214d13da --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MappingCharFilter.Serialization.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class MappingCharFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MappingCharFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("mappings"u8); + writer.WriteStartArray(); + foreach (var item in Mappings) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + + MappingCharFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MappingCharFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMappingCharFilter(document.RootElement, options); + } + + internal static MappingCharFilter DeserializeMappingCharFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList mappings = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("mappings"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + mappings = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new MappingCharFilter(odataType, name, serializedAdditionalRawData, mappings); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(MappingCharFilter)} does not support writing '{options.Format}' format."); + } + } + + MappingCharFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMappingCharFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MappingCharFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new MappingCharFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMappingCharFilter(document.RootElement); + } + + /// Convert into a . 
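// ---- Illustrative usage sketch (not part of the generated code) ----
// MappingCharFilter (defined in the next file of this diff) rewrites characters before
// tokenization using "a=>b" rules, with greedy longest-pattern matching and empty
// replacements allowed. A minimal sketch, assuming the public
// MappingCharFilter(string, IEnumerable<string>) constructor shown below and the existing
// SearchIndex.CharFilters collection; the index and filter names are hypothetical:
//
//     var dashFilter = new MappingCharFilter(
//         "map_dashes_to_space",
//         new[] { "-=> ", "_=> " });   // "-" and "_" both become a single space
//
//     var index = new SearchIndex("hotels-sample");
//     index.CharFilters.Add(dashFilter);
//     // A custom analyzer can then reference the filter by its name in its char filter list.
// ---------------------------------------------------------------------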
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MappingCharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/MappingCharFilter.cs new file mode 100644 index 000000000000..edd94843895c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MappingCharFilter.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// A character filter that applies mappings defined with the mappings option. + /// Matching is greedy (longest pattern matching at a given point wins). + /// Replacement is allowed to be the empty string. This character filter is + /// implemented using Apache Lucene. + /// + public partial class MappingCharFilter : CharFilter + { + /// Initializes a new instance of . + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// + /// A list of mappings of the following format: "a=>b" (all occurrences of the + /// character "a" will be replaced with character "b"). + /// + /// or is null. + public MappingCharFilter(string name, IEnumerable mappings) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(mappings, nameof(mappings)); + + OdataType = "#Microsoft.Azure.Search.MappingCharFilter"; + Mappings = mappings.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// A list of mappings of the following format: "a=>b" (all occurrences of the + /// character "a" will be replaced with character "b"). + /// + internal MappingCharFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList mappings) : base(odataType, name, serializedAdditionalRawData) + { + Mappings = mappings; + } + + /// Initializes a new instance of for deserialization. + internal MappingCharFilter() + { + } + + /// + /// A list of mappings of the following format: "a=>b" (all occurrences of the + /// character "a" will be replaced with character "b"). + /// + public IList Mappings { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MarkdownHeaderDepth.cs b/sdk/search/Azure.Search.Documents/src/Generated/MarkdownHeaderDepth.cs new file mode 100644 index 000000000000..4ea96578f71b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MarkdownHeaderDepth.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// Specifies the max header depth that will be considered while grouping markdown + /// content. Default is `h6`. 
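// ---- Illustrative usage sketch (not part of the generated code) ----
// MarkdownHeaderDepth is an "extensible enum": a struct wrapping a string, with named values
// h1..h6, case-insensitive equality, and an implicit conversion from string (members shown
// just below). A minimal sketch of how such values behave; the indexer configuration
// properties that consume this type are not part of this diff and are therefore omitted:
//
//     MarkdownHeaderDepth depth = MarkdownHeaderDepth.H3;
//     MarkdownHeaderDepth fromString = "H3";    // implicit conversion from string
//     bool same = depth == fromString;          // true: comparison ignores case
//     string wireValue = depth.ToString();      // "h3"
// ---------------------------------------------------------------------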
+ /// + public readonly partial struct MarkdownHeaderDepth : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public MarkdownHeaderDepth(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string H1Value = "h1"; + private const string H2Value = "h2"; + private const string H3Value = "h3"; + private const string H4Value = "h4"; + private const string H5Value = "h5"; + private const string H6Value = "h6"; + + /// + /// Indicates that headers up to a level of h1 will be considered while grouping + /// markdown content. + /// + public static MarkdownHeaderDepth H1 { get; } = new MarkdownHeaderDepth(H1Value); + /// + /// Indicates that headers up to a level of h2 will be considered while grouping + /// markdown content. + /// + public static MarkdownHeaderDepth H2 { get; } = new MarkdownHeaderDepth(H2Value); + /// + /// Indicates that headers up to a level of h3 will be considered while grouping + /// markdown content. + /// + public static MarkdownHeaderDepth H3 { get; } = new MarkdownHeaderDepth(H3Value); + /// + /// Indicates that headers up to a level of h4 will be considered while grouping + /// markdown content. + /// + public static MarkdownHeaderDepth H4 { get; } = new MarkdownHeaderDepth(H4Value); + /// + /// Indicates that headers up to a level of h5 will be considered while grouping + /// markdown content. + /// + public static MarkdownHeaderDepth H5 { get; } = new MarkdownHeaderDepth(H5Value); + /// + /// Indicates that headers up to a level of h6 will be considered while grouping + /// markdown content. This is the default. + /// + public static MarkdownHeaderDepth H6 { get; } = new MarkdownHeaderDepth(H6Value); + /// Determines if two values are the same. + public static bool operator ==(MarkdownHeaderDepth left, MarkdownHeaderDepth right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(MarkdownHeaderDepth left, MarkdownHeaderDepth right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator MarkdownHeaderDepth(string value) => new MarkdownHeaderDepth(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is MarkdownHeaderDepth other && Equals(other); + /// + public bool Equals(MarkdownHeaderDepth other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MarkdownParsingSubmode.cs b/sdk/search/Azure.Search.Documents/src/Generated/MarkdownParsingSubmode.cs similarity index 80% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/MarkdownParsingSubmode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/MarkdownParsingSubmode.cs index f272b7fc8e97..4168da487a0b 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MarkdownParsingSubmode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/MarkdownParsingSubmode.cs @@ -8,9 +8,13 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { - /// Specifies the submode that will determine whether a markdown file will be parsed into exactly one search document or multiple search documents. Default is `oneToMany`. + /// + /// Specifies the submode that will determine whether a markdown file will be + /// parsed into exactly one search document or multiple search documents. Default + /// is `oneToMany`. + /// public readonly partial struct MarkdownParsingSubmode : IEquatable { private readonly string _value; @@ -25,7 +29,12 @@ public MarkdownParsingSubmode(string value) private const string OneToManyValue = "oneToMany"; private const string OneToOneValue = "oneToOne"; - /// Indicates that each section of the markdown file (up to a specified depth) will be parsed into individual search documents. This can result in a single markdown file producing multiple search documents. This is the default sub-mode. + /// + /// Indicates that each section of the markdown file (up to a specified depth) will + /// be parsed into individual search documents. This can result in a single + /// markdown file producing multiple search documents. This is the default + /// sub-mode. + /// public static MarkdownParsingSubmode OneToMany { get; } = new MarkdownParsingSubmode(OneToManyValue); /// Indicates that each markdown file will be parsed into a single search document. public static MarkdownParsingSubmode OneToOne { get; } = new MarkdownParsingSubmode(OneToOneValue); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MergeSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/MergeSkill.Serialization.cs new file mode 100644 index 000000000000..064daffcc46c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MergeSkill.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class MergeSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MergeSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(InsertPreTag)) + { + writer.WritePropertyName("insertPreTag"u8); + writer.WriteStringValue(InsertPreTag); + } + if (Optional.IsDefined(InsertPostTag)) + { + writer.WritePropertyName("insertPostTag"u8); + writer.WriteStringValue(InsertPostTag); + } + } + + MergeSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MergeSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMergeSkill(document.RootElement, options); + } + + internal static MergeSkill DeserializeMergeSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string insertPreTag = default; + string insertPostTag = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("insertPreTag"u8)) + { + insertPreTag = property.Value.GetString(); + continue; + } + if (property.NameEquals("insertPostTag"u8)) + { + insertPostTag = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new MergeSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + insertPreTag, + insertPostTag); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(MergeSkill)} does not support writing '{options.Format}' format."); + } + } + + MergeSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMergeSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MergeSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new MergeSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMergeSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MergeSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/MergeSkill.cs new file mode 100644 index 000000000000..22923ace0436 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MergeSkill.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A skill for merging two or more strings into a single unified string, with an + /// optional user-defined delimiter separating each component part. + /// + public partial class MergeSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public MergeSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.MergeSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. 
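// ---- Illustrative usage sketch (not part of the generated code) ----
// MergeSkill concatenates strings (for example, OCR text merged back into document content),
// with optional tags inserted around each merged piece. A minimal sketch, assuming the
// public MergeSkill(IEnumerable<InputFieldMappingEntry>, IEnumerable<OutputFieldMappingEntry>)
// constructor shown in this file; the input source paths and skillset name are hypothetical:
//
//     var mergeSkill = new MergeSkill(
//         inputs: new[]
//         {
//             new InputFieldMappingEntry("text") { Source = "/document/content" },
//             new InputFieldMappingEntry("itemsToInsert") { Source = "/document/normalized_images/*/text" },
//             new InputFieldMappingEntry("offsets") { Source = "/document/normalized_images/*/contentOffset" },
//         },
//         outputs: new[] { new OutputFieldMappingEntry("mergedText") { TargetName = "merged_text" } })
//     {
//         InsertPreTag = " [",
//         InsertPostTag = "] ",
//     };
//
//     var skillset = new SearchIndexerSkillset("ocr-merge-skillset", new SearchIndexerSkill[] { mergeSkill });
// ---------------------------------------------------------------------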
+ /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The tag indicates the start of the merged text. By default, the tag is an empty + /// space. + /// + /// + /// The tag indicates the end of the merged text. By default, the tag is an empty + /// space. + /// + internal MergeSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string insertPreTag, string insertPostTag) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + InsertPreTag = insertPreTag; + InsertPostTag = insertPostTag; + } + + /// Initializes a new instance of for deserialization. + internal MergeSkill() + { + } + + /// + /// The tag indicates the start of the merged text. By default, the tag is an empty + /// space. + /// + public string InsertPreTag { get; set; } + /// + /// The tag indicates the end of the merged text. By default, the tag is an empty + /// space. + /// + public string InsertPostTag { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageStemmingTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageStemmingTokenizer.Serialization.cs new file mode 100644 index 000000000000..077b2175d229 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageStemmingTokenizer.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class MicrosoftLanguageStemmingTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MicrosoftLanguageStemmingTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + if (Optional.IsDefined(IsSearchTokenizer)) + { + writer.WritePropertyName("isSearchTokenizer"u8); + writer.WriteBooleanValue(IsSearchTokenizer.Value); + } + if (Optional.IsDefined(Language)) + { + writer.WritePropertyName("language"u8); + writer.WriteStringValue(Language.Value.ToString()); + } + } + + MicrosoftLanguageStemmingTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MicrosoftLanguageStemmingTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMicrosoftLanguageStemmingTokenizer(document.RootElement, options); + } + + internal static MicrosoftLanguageStemmingTokenizer DeserializeMicrosoftLanguageStemmingTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxTokenLength = default; + bool? isSearchTokenizer = default; + MicrosoftStemmingTokenizerLanguage? language = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("isSearchTokenizer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + isSearchTokenizer = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("language"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + language = new MicrosoftStemmingTokenizerLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new MicrosoftLanguageStemmingTokenizer( + odataType, + name, + serializedAdditionalRawData, + maxTokenLength, + isSearchTokenizer, + language); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(MicrosoftLanguageStemmingTokenizer)} does not support writing '{options.Format}' format."); + } + } + + MicrosoftLanguageStemmingTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMicrosoftLanguageStemmingTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MicrosoftLanguageStemmingTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new MicrosoftLanguageStemmingTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMicrosoftLanguageStemmingTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageStemmingTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageStemmingTokenizer.cs new file mode 100644 index 000000000000..0e801636e9fe --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageStemmingTokenizer.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Divides text using language-specific rules and reduces words to their base + /// forms. + /// + public partial class MicrosoftLanguageStemmingTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public MicrosoftLanguageStemmingTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The maximum token length. Tokens longer than the maximum length are split. + /// Maximum token length that can be used is 300 characters. 
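// ---- Illustrative usage sketch (not part of the generated code) ----
// MicrosoftLanguageStemmingTokenizer is typically registered on the index and used by the
// analyzer applied at indexing time. A minimal sketch, assuming the public constructor shown
// in this diff and the existing SearchIndex.Tokenizers collection; the tokenizer and index
// names are hypothetical:
//
//     var stemmingTokenizer = new MicrosoftLanguageStemmingTokenizer("en-stemming-tokenizer")
//     {
//         Language = MicrosoftStemmingTokenizerLanguage.English,
//         MaxTokenLength = 255,        // tokens longer than this are split; the cap is 300
//         IsSearchTokenizer = false,   // false = used as the indexing tokenizer
//     };
//
//     var index = new SearchIndex("articles");
//     index.Tokenizers.Add(stemmingTokenizer);
// ---------------------------------------------------------------------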
Tokens longer than 300 + /// characters are first split into tokens of length 300 and then each of those + /// tokens is split based on the max token length set. Default is 255. + /// + /// + /// A value indicating how the tokenizer is used. Set to true if used as the search + /// tokenizer, set to false if used as the indexing tokenizer. Default is false. + /// + /// The language to use. The default is English. + internal MicrosoftLanguageStemmingTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenLength, bool? isSearchTokenizer, MicrosoftStemmingTokenizerLanguage? language) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenLength = maxTokenLength; + IsSearchTokenizer = isSearchTokenizer; + Language = language; + } + + /// Initializes a new instance of for deserialization. + internal MicrosoftLanguageStemmingTokenizer() + { + } + + /// + /// The maximum token length. Tokens longer than the maximum length are split. + /// Maximum token length that can be used is 300 characters. Tokens longer than 300 + /// characters are first split into tokens of length 300 and then each of those + /// tokens is split based on the max token length set. Default is 255. + /// + public int? MaxTokenLength { get; set; } + /// + /// A value indicating how the tokenizer is used. Set to true if used as the search + /// tokenizer, set to false if used as the indexing tokenizer. Default is false. + /// + public bool? IsSearchTokenizer { get; set; } + /// The language to use. The default is English. + public MicrosoftStemmingTokenizerLanguage? Language { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageTokenizer.Serialization.cs new file mode 100644 index 000000000000..53c20fad2637 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageTokenizer.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class MicrosoftLanguageTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MicrosoftLanguageTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + if (Optional.IsDefined(IsSearchTokenizer)) + { + writer.WritePropertyName("isSearchTokenizer"u8); + writer.WriteBooleanValue(IsSearchTokenizer.Value); + } + if (Optional.IsDefined(Language)) + { + writer.WritePropertyName("language"u8); + writer.WriteStringValue(Language.Value.ToString()); + } + } + + MicrosoftLanguageTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MicrosoftLanguageTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMicrosoftLanguageTokenizer(document.RootElement, options); + } + + internal static MicrosoftLanguageTokenizer DeserializeMicrosoftLanguageTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxTokenLength = default; + bool? isSearchTokenizer = default; + MicrosoftTokenizerLanguage? language = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("isSearchTokenizer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + isSearchTokenizer = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("language"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + language = new MicrosoftTokenizerLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new MicrosoftLanguageTokenizer( + odataType, + name, + serializedAdditionalRawData, + maxTokenLength, + isSearchTokenizer, + language); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(MicrosoftLanguageTokenizer)} does not support writing '{options.Format}' format."); + } + } + + MicrosoftLanguageTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMicrosoftLanguageTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MicrosoftLanguageTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new MicrosoftLanguageTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeMicrosoftLanguageTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageTokenizer.cs new file mode 100644 index 000000000000..7808dd851651 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftLanguageTokenizer.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Divides text using language-specific rules. + public partial class MicrosoftLanguageTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public MicrosoftLanguageTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The maximum token length. Tokens longer than the maximum length are split. + /// Maximum token length that can be used is 300 characters. Tokens longer than 300 + /// characters are first split into tokens of length 300 and then each of those + /// tokens is split based on the max token length set. Default is 255. + /// + /// + /// A value indicating how the tokenizer is used. 
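// ---- Illustrative usage sketch (not part of the generated code) ----
// MicrosoftLanguageTokenizer is the non-stemming counterpart of the tokenizer above; per the
// doc comments in this diff, IsSearchTokenizer distinguishes the tokenizer used at query time
// (true) from the one used at indexing time (false). A minimal sketch of that pairing, with
// hypothetical names:
//
//     var queryTokenizer = new MicrosoftLanguageTokenizer("en-query-tokenizer")
//     {
//         Language = MicrosoftTokenizerLanguage.English,
//         IsSearchTokenizer = true,    // used by the search-time analyzer
//     };
//
//     var indexingTokenizer = new MicrosoftLanguageStemmingTokenizer("en-indexing-tokenizer")
//     {
//         Language = MicrosoftStemmingTokenizerLanguage.English,
//         IsSearchTokenizer = false,   // used by the indexing-time analyzer
//     };
// ---------------------------------------------------------------------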
Set to true if used as the search + /// tokenizer, set to false if used as the indexing tokenizer. Default is false. + /// + /// The language to use. The default is English. + internal MicrosoftLanguageTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenLength, bool? isSearchTokenizer, MicrosoftTokenizerLanguage? language) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenLength = maxTokenLength; + IsSearchTokenizer = isSearchTokenizer; + Language = language; + } + + /// Initializes a new instance of for deserialization. + internal MicrosoftLanguageTokenizer() + { + } + + /// + /// The maximum token length. Tokens longer than the maximum length are split. + /// Maximum token length that can be used is 300 characters. Tokens longer than 300 + /// characters are first split into tokens of length 300 and then each of those + /// tokens is split based on the max token length set. Default is 255. + /// + public int? MaxTokenLength { get; set; } + /// + /// A value indicating how the tokenizer is used. Set to true if used as the search + /// tokenizer, set to false if used as the indexing tokenizer. Default is false. + /// + public bool? IsSearchTokenizer { get; set; } + /// The language to use. The default is English. + public MicrosoftTokenizerLanguage? Language { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftStemmingTokenizerLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftStemmingTokenizerLanguage.cs new file mode 100644 index 000000000000..82d97263731f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftStemmingTokenizerLanguage.cs @@ -0,0 +1,180 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Lists the languages supported by the Microsoft language stemming tokenizer. + public readonly partial struct MicrosoftStemmingTokenizerLanguage : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public MicrosoftStemmingTokenizerLanguage(string value) + { + _value = value ?? 
throw new ArgumentNullException(nameof(value)); + } + + private const string ArabicValue = "arabic"; + private const string BanglaValue = "bangla"; + private const string BulgarianValue = "bulgarian"; + private const string CatalanValue = "catalan"; + private const string CroatianValue = "croatian"; + private const string CzechValue = "czech"; + private const string DanishValue = "danish"; + private const string DutchValue = "dutch"; + private const string EnglishValue = "english"; + private const string EstonianValue = "estonian"; + private const string FinnishValue = "finnish"; + private const string FrenchValue = "french"; + private const string GermanValue = "german"; + private const string GreekValue = "greek"; + private const string GujaratiValue = "gujarati"; + private const string HebrewValue = "hebrew"; + private const string HindiValue = "hindi"; + private const string HungarianValue = "hungarian"; + private const string IcelandicValue = "icelandic"; + private const string IndonesianValue = "indonesian"; + private const string ItalianValue = "italian"; + private const string KannadaValue = "kannada"; + private const string LatvianValue = "latvian"; + private const string LithuanianValue = "lithuanian"; + private const string MalayValue = "malay"; + private const string MalayalamValue = "malayalam"; + private const string MarathiValue = "marathi"; + private const string NorwegianBokmaalValue = "norwegianBokmaal"; + private const string PolishValue = "polish"; + private const string PortugueseValue = "portuguese"; + private const string PortugueseBrazilianValue = "portugueseBrazilian"; + private const string PunjabiValue = "punjabi"; + private const string RomanianValue = "romanian"; + private const string RussianValue = "russian"; + private const string SerbianCyrillicValue = "serbianCyrillic"; + private const string SerbianLatinValue = "serbianLatin"; + private const string SlovakValue = "slovak"; + private const string SlovenianValue = "slovenian"; + private const string SpanishValue = "spanish"; + private const string SwedishValue = "swedish"; + private const string TamilValue = "tamil"; + private const string TeluguValue = "telugu"; + private const string TurkishValue = "turkish"; + private const string UkrainianValue = "ukrainian"; + private const string UrduValue = "urdu"; + + /// Selects the Microsoft stemming tokenizer for Arabic. + public static MicrosoftStemmingTokenizerLanguage Arabic { get; } = new MicrosoftStemmingTokenizerLanguage(ArabicValue); + /// Selects the Microsoft stemming tokenizer for Bangla. + public static MicrosoftStemmingTokenizerLanguage Bangla { get; } = new MicrosoftStemmingTokenizerLanguage(BanglaValue); + /// Selects the Microsoft stemming tokenizer for Bulgarian. + public static MicrosoftStemmingTokenizerLanguage Bulgarian { get; } = new MicrosoftStemmingTokenizerLanguage(BulgarianValue); + /// Selects the Microsoft stemming tokenizer for Catalan. + public static MicrosoftStemmingTokenizerLanguage Catalan { get; } = new MicrosoftStemmingTokenizerLanguage(CatalanValue); + /// Selects the Microsoft stemming tokenizer for Croatian. + public static MicrosoftStemmingTokenizerLanguage Croatian { get; } = new MicrosoftStemmingTokenizerLanguage(CroatianValue); + /// Selects the Microsoft stemming tokenizer for Czech. + public static MicrosoftStemmingTokenizerLanguage Czech { get; } = new MicrosoftStemmingTokenizerLanguage(CzechValue); + /// Selects the Microsoft stemming tokenizer for Danish. 
+ public static MicrosoftStemmingTokenizerLanguage Danish { get; } = new MicrosoftStemmingTokenizerLanguage(DanishValue); + /// Selects the Microsoft stemming tokenizer for Dutch. + public static MicrosoftStemmingTokenizerLanguage Dutch { get; } = new MicrosoftStemmingTokenizerLanguage(DutchValue); + /// Selects the Microsoft stemming tokenizer for English. + public static MicrosoftStemmingTokenizerLanguage English { get; } = new MicrosoftStemmingTokenizerLanguage(EnglishValue); + /// Selects the Microsoft stemming tokenizer for Estonian. + public static MicrosoftStemmingTokenizerLanguage Estonian { get; } = new MicrosoftStemmingTokenizerLanguage(EstonianValue); + /// Selects the Microsoft stemming tokenizer for Finnish. + public static MicrosoftStemmingTokenizerLanguage Finnish { get; } = new MicrosoftStemmingTokenizerLanguage(FinnishValue); + /// Selects the Microsoft stemming tokenizer for French. + public static MicrosoftStemmingTokenizerLanguage French { get; } = new MicrosoftStemmingTokenizerLanguage(FrenchValue); + /// Selects the Microsoft stemming tokenizer for German. + public static MicrosoftStemmingTokenizerLanguage German { get; } = new MicrosoftStemmingTokenizerLanguage(GermanValue); + /// Selects the Microsoft stemming tokenizer for Greek. + public static MicrosoftStemmingTokenizerLanguage Greek { get; } = new MicrosoftStemmingTokenizerLanguage(GreekValue); + /// Selects the Microsoft stemming tokenizer for Gujarati. + public static MicrosoftStemmingTokenizerLanguage Gujarati { get; } = new MicrosoftStemmingTokenizerLanguage(GujaratiValue); + /// Selects the Microsoft stemming tokenizer for Hebrew. + public static MicrosoftStemmingTokenizerLanguage Hebrew { get; } = new MicrosoftStemmingTokenizerLanguage(HebrewValue); + /// Selects the Microsoft stemming tokenizer for Hindi. + public static MicrosoftStemmingTokenizerLanguage Hindi { get; } = new MicrosoftStemmingTokenizerLanguage(HindiValue); + /// Selects the Microsoft stemming tokenizer for Hungarian. + public static MicrosoftStemmingTokenizerLanguage Hungarian { get; } = new MicrosoftStemmingTokenizerLanguage(HungarianValue); + /// Selects the Microsoft stemming tokenizer for Icelandic. + public static MicrosoftStemmingTokenizerLanguage Icelandic { get; } = new MicrosoftStemmingTokenizerLanguage(IcelandicValue); + /// Selects the Microsoft stemming tokenizer for Indonesian. + public static MicrosoftStemmingTokenizerLanguage Indonesian { get; } = new MicrosoftStemmingTokenizerLanguage(IndonesianValue); + /// Selects the Microsoft stemming tokenizer for Italian. + public static MicrosoftStemmingTokenizerLanguage Italian { get; } = new MicrosoftStemmingTokenizerLanguage(ItalianValue); + /// Selects the Microsoft stemming tokenizer for Kannada. + public static MicrosoftStemmingTokenizerLanguage Kannada { get; } = new MicrosoftStemmingTokenizerLanguage(KannadaValue); + /// Selects the Microsoft stemming tokenizer for Latvian. + public static MicrosoftStemmingTokenizerLanguage Latvian { get; } = new MicrosoftStemmingTokenizerLanguage(LatvianValue); + /// Selects the Microsoft stemming tokenizer for Lithuanian. + public static MicrosoftStemmingTokenizerLanguage Lithuanian { get; } = new MicrosoftStemmingTokenizerLanguage(LithuanianValue); + /// Selects the Microsoft stemming tokenizer for Malay. + public static MicrosoftStemmingTokenizerLanguage Malay { get; } = new MicrosoftStemmingTokenizerLanguage(MalayValue); + /// Selects the Microsoft stemming tokenizer for Malayalam. 
+ public static MicrosoftStemmingTokenizerLanguage Malayalam { get; } = new MicrosoftStemmingTokenizerLanguage(MalayalamValue); + /// Selects the Microsoft stemming tokenizer for Marathi. + public static MicrosoftStemmingTokenizerLanguage Marathi { get; } = new MicrosoftStemmingTokenizerLanguage(MarathiValue); + /// Selects the Microsoft stemming tokenizer for Norwegian (Bokmål). + public static MicrosoftStemmingTokenizerLanguage NorwegianBokmaal { get; } = new MicrosoftStemmingTokenizerLanguage(NorwegianBokmaalValue); + /// Selects the Microsoft stemming tokenizer for Polish. + public static MicrosoftStemmingTokenizerLanguage Polish { get; } = new MicrosoftStemmingTokenizerLanguage(PolishValue); + /// Selects the Microsoft stemming tokenizer for Portuguese. + public static MicrosoftStemmingTokenizerLanguage Portuguese { get; } = new MicrosoftStemmingTokenizerLanguage(PortugueseValue); + /// Selects the Microsoft stemming tokenizer for Portuguese (Brazil). + public static MicrosoftStemmingTokenizerLanguage PortugueseBrazilian { get; } = new MicrosoftStemmingTokenizerLanguage(PortugueseBrazilianValue); + /// Selects the Microsoft stemming tokenizer for Punjabi. + public static MicrosoftStemmingTokenizerLanguage Punjabi { get; } = new MicrosoftStemmingTokenizerLanguage(PunjabiValue); + /// Selects the Microsoft stemming tokenizer for Romanian. + public static MicrosoftStemmingTokenizerLanguage Romanian { get; } = new MicrosoftStemmingTokenizerLanguage(RomanianValue); + /// Selects the Microsoft stemming tokenizer for Russian. + public static MicrosoftStemmingTokenizerLanguage Russian { get; } = new MicrosoftStemmingTokenizerLanguage(RussianValue); + /// Selects the Microsoft stemming tokenizer for Serbian (Cyrillic). + public static MicrosoftStemmingTokenizerLanguage SerbianCyrillic { get; } = new MicrosoftStemmingTokenizerLanguage(SerbianCyrillicValue); + /// Selects the Microsoft stemming tokenizer for Serbian (Latin). + public static MicrosoftStemmingTokenizerLanguage SerbianLatin { get; } = new MicrosoftStemmingTokenizerLanguage(SerbianLatinValue); + /// Selects the Microsoft stemming tokenizer for Slovak. + public static MicrosoftStemmingTokenizerLanguage Slovak { get; } = new MicrosoftStemmingTokenizerLanguage(SlovakValue); + /// Selects the Microsoft stemming tokenizer for Slovenian. + public static MicrosoftStemmingTokenizerLanguage Slovenian { get; } = new MicrosoftStemmingTokenizerLanguage(SlovenianValue); + /// Selects the Microsoft stemming tokenizer for Spanish. + public static MicrosoftStemmingTokenizerLanguage Spanish { get; } = new MicrosoftStemmingTokenizerLanguage(SpanishValue); + /// Selects the Microsoft stemming tokenizer for Swedish. + public static MicrosoftStemmingTokenizerLanguage Swedish { get; } = new MicrosoftStemmingTokenizerLanguage(SwedishValue); + /// Selects the Microsoft stemming tokenizer for Tamil. + public static MicrosoftStemmingTokenizerLanguage Tamil { get; } = new MicrosoftStemmingTokenizerLanguage(TamilValue); + /// Selects the Microsoft stemming tokenizer for Telugu. + public static MicrosoftStemmingTokenizerLanguage Telugu { get; } = new MicrosoftStemmingTokenizerLanguage(TeluguValue); + /// Selects the Microsoft stemming tokenizer for Turkish. + public static MicrosoftStemmingTokenizerLanguage Turkish { get; } = new MicrosoftStemmingTokenizerLanguage(TurkishValue); + /// Selects the Microsoft stemming tokenizer for Ukrainian. 
+ public static MicrosoftStemmingTokenizerLanguage Ukrainian { get; } = new MicrosoftStemmingTokenizerLanguage(UkrainianValue); + /// Selects the Microsoft stemming tokenizer for Urdu. + public static MicrosoftStemmingTokenizerLanguage Urdu { get; } = new MicrosoftStemmingTokenizerLanguage(UrduValue); + /// Determines if two values are the same. + public static bool operator ==(MicrosoftStemmingTokenizerLanguage left, MicrosoftStemmingTokenizerLanguage right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(MicrosoftStemmingTokenizerLanguage left, MicrosoftStemmingTokenizerLanguage right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator MicrosoftStemmingTokenizerLanguage(string value) => new MicrosoftStemmingTokenizerLanguage(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is MicrosoftStemmingTokenizerLanguage other && Equals(other); + /// + public bool Equals(MicrosoftStemmingTokenizerLanguage other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftTokenizerLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftTokenizerLanguage.cs new file mode 100644 index 000000000000..6e38a0990a13 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/MicrosoftTokenizerLanguage.cs @@ -0,0 +1,171 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Lists the languages supported by the Microsoft language tokenizer. + public readonly partial struct MicrosoftTokenizerLanguage : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public MicrosoftTokenizerLanguage(string value) + { + _value = value ?? 
throw new ArgumentNullException(nameof(value)); + } + + private const string BanglaValue = "bangla"; + private const string BulgarianValue = "bulgarian"; + private const string CatalanValue = "catalan"; + private const string ChineseSimplifiedValue = "chineseSimplified"; + private const string ChineseTraditionalValue = "chineseTraditional"; + private const string CroatianValue = "croatian"; + private const string CzechValue = "czech"; + private const string DanishValue = "danish"; + private const string DutchValue = "dutch"; + private const string EnglishValue = "english"; + private const string FrenchValue = "french"; + private const string GermanValue = "german"; + private const string GreekValue = "greek"; + private const string GujaratiValue = "gujarati"; + private const string HindiValue = "hindi"; + private const string IcelandicValue = "icelandic"; + private const string IndonesianValue = "indonesian"; + private const string ItalianValue = "italian"; + private const string JapaneseValue = "japanese"; + private const string KannadaValue = "kannada"; + private const string KoreanValue = "korean"; + private const string MalayValue = "malay"; + private const string MalayalamValue = "malayalam"; + private const string MarathiValue = "marathi"; + private const string NorwegianBokmaalValue = "norwegianBokmaal"; + private const string PolishValue = "polish"; + private const string PortugueseValue = "portuguese"; + private const string PortugueseBrazilianValue = "portugueseBrazilian"; + private const string PunjabiValue = "punjabi"; + private const string RomanianValue = "romanian"; + private const string RussianValue = "russian"; + private const string SerbianCyrillicValue = "serbianCyrillic"; + private const string SerbianLatinValue = "serbianLatin"; + private const string SlovenianValue = "slovenian"; + private const string SpanishValue = "spanish"; + private const string SwedishValue = "swedish"; + private const string TamilValue = "tamil"; + private const string TeluguValue = "telugu"; + private const string ThaiValue = "thai"; + private const string UkrainianValue = "ukrainian"; + private const string UrduValue = "urdu"; + private const string VietnameseValue = "vietnamese"; + + /// Selects the Microsoft tokenizer for Bangla. + public static MicrosoftTokenizerLanguage Bangla { get; } = new MicrosoftTokenizerLanguage(BanglaValue); + /// Selects the Microsoft tokenizer for Bulgarian. + public static MicrosoftTokenizerLanguage Bulgarian { get; } = new MicrosoftTokenizerLanguage(BulgarianValue); + /// Selects the Microsoft tokenizer for Catalan. + public static MicrosoftTokenizerLanguage Catalan { get; } = new MicrosoftTokenizerLanguage(CatalanValue); + /// Selects the Microsoft tokenizer for Chinese (Simplified). + public static MicrosoftTokenizerLanguage ChineseSimplified { get; } = new MicrosoftTokenizerLanguage(ChineseSimplifiedValue); + /// Selects the Microsoft tokenizer for Chinese (Traditional). + public static MicrosoftTokenizerLanguage ChineseTraditional { get; } = new MicrosoftTokenizerLanguage(ChineseTraditionalValue); + /// Selects the Microsoft tokenizer for Croatian. + public static MicrosoftTokenizerLanguage Croatian { get; } = new MicrosoftTokenizerLanguage(CroatianValue); + /// Selects the Microsoft tokenizer for Czech. + public static MicrosoftTokenizerLanguage Czech { get; } = new MicrosoftTokenizerLanguage(CzechValue); + /// Selects the Microsoft tokenizer for Danish. 
+ public static MicrosoftTokenizerLanguage Danish { get; } = new MicrosoftTokenizerLanguage(DanishValue); + /// Selects the Microsoft tokenizer for Dutch. + public static MicrosoftTokenizerLanguage Dutch { get; } = new MicrosoftTokenizerLanguage(DutchValue); + /// Selects the Microsoft tokenizer for English. + public static MicrosoftTokenizerLanguage English { get; } = new MicrosoftTokenizerLanguage(EnglishValue); + /// Selects the Microsoft tokenizer for French. + public static MicrosoftTokenizerLanguage French { get; } = new MicrosoftTokenizerLanguage(FrenchValue); + /// Selects the Microsoft tokenizer for German. + public static MicrosoftTokenizerLanguage German { get; } = new MicrosoftTokenizerLanguage(GermanValue); + /// Selects the Microsoft tokenizer for Greek. + public static MicrosoftTokenizerLanguage Greek { get; } = new MicrosoftTokenizerLanguage(GreekValue); + /// Selects the Microsoft tokenizer for Gujarati. + public static MicrosoftTokenizerLanguage Gujarati { get; } = new MicrosoftTokenizerLanguage(GujaratiValue); + /// Selects the Microsoft tokenizer for Hindi. + public static MicrosoftTokenizerLanguage Hindi { get; } = new MicrosoftTokenizerLanguage(HindiValue); + /// Selects the Microsoft tokenizer for Icelandic. + public static MicrosoftTokenizerLanguage Icelandic { get; } = new MicrosoftTokenizerLanguage(IcelandicValue); + /// Selects the Microsoft tokenizer for Indonesian. + public static MicrosoftTokenizerLanguage Indonesian { get; } = new MicrosoftTokenizerLanguage(IndonesianValue); + /// Selects the Microsoft tokenizer for Italian. + public static MicrosoftTokenizerLanguage Italian { get; } = new MicrosoftTokenizerLanguage(ItalianValue); + /// Selects the Microsoft tokenizer for Japanese. + public static MicrosoftTokenizerLanguage Japanese { get; } = new MicrosoftTokenizerLanguage(JapaneseValue); + /// Selects the Microsoft tokenizer for Kannada. + public static MicrosoftTokenizerLanguage Kannada { get; } = new MicrosoftTokenizerLanguage(KannadaValue); + /// Selects the Microsoft tokenizer for Korean. + public static MicrosoftTokenizerLanguage Korean { get; } = new MicrosoftTokenizerLanguage(KoreanValue); + /// Selects the Microsoft tokenizer for Malay. + public static MicrosoftTokenizerLanguage Malay { get; } = new MicrosoftTokenizerLanguage(MalayValue); + /// Selects the Microsoft tokenizer for Malayalam. + public static MicrosoftTokenizerLanguage Malayalam { get; } = new MicrosoftTokenizerLanguage(MalayalamValue); + /// Selects the Microsoft tokenizer for Marathi. + public static MicrosoftTokenizerLanguage Marathi { get; } = new MicrosoftTokenizerLanguage(MarathiValue); + /// Selects the Microsoft tokenizer for Norwegian (Bokmål). + public static MicrosoftTokenizerLanguage NorwegianBokmaal { get; } = new MicrosoftTokenizerLanguage(NorwegianBokmaalValue); + /// Selects the Microsoft tokenizer for Polish. + public static MicrosoftTokenizerLanguage Polish { get; } = new MicrosoftTokenizerLanguage(PolishValue); + /// Selects the Microsoft tokenizer for Portuguese. + public static MicrosoftTokenizerLanguage Portuguese { get; } = new MicrosoftTokenizerLanguage(PortugueseValue); + /// Selects the Microsoft tokenizer for Portuguese (Brazil). + public static MicrosoftTokenizerLanguage PortugueseBrazilian { get; } = new MicrosoftTokenizerLanguage(PortugueseBrazilianValue); + /// Selects the Microsoft tokenizer for Punjabi. 
+ public static MicrosoftTokenizerLanguage Punjabi { get; } = new MicrosoftTokenizerLanguage(PunjabiValue); + /// Selects the Microsoft tokenizer for Romanian. + public static MicrosoftTokenizerLanguage Romanian { get; } = new MicrosoftTokenizerLanguage(RomanianValue); + /// Selects the Microsoft tokenizer for Russian. + public static MicrosoftTokenizerLanguage Russian { get; } = new MicrosoftTokenizerLanguage(RussianValue); + /// Selects the Microsoft tokenizer for Serbian (Cyrillic). + public static MicrosoftTokenizerLanguage SerbianCyrillic { get; } = new MicrosoftTokenizerLanguage(SerbianCyrillicValue); + /// Selects the Microsoft tokenizer for Serbian (Latin). + public static MicrosoftTokenizerLanguage SerbianLatin { get; } = new MicrosoftTokenizerLanguage(SerbianLatinValue); + /// Selects the Microsoft tokenizer for Slovenian. + public static MicrosoftTokenizerLanguage Slovenian { get; } = new MicrosoftTokenizerLanguage(SlovenianValue); + /// Selects the Microsoft tokenizer for Spanish. + public static MicrosoftTokenizerLanguage Spanish { get; } = new MicrosoftTokenizerLanguage(SpanishValue); + /// Selects the Microsoft tokenizer for Swedish. + public static MicrosoftTokenizerLanguage Swedish { get; } = new MicrosoftTokenizerLanguage(SwedishValue); + /// Selects the Microsoft tokenizer for Tamil. + public static MicrosoftTokenizerLanguage Tamil { get; } = new MicrosoftTokenizerLanguage(TamilValue); + /// Selects the Microsoft tokenizer for Telugu. + public static MicrosoftTokenizerLanguage Telugu { get; } = new MicrosoftTokenizerLanguage(TeluguValue); + /// Selects the Microsoft tokenizer for Thai. + public static MicrosoftTokenizerLanguage Thai { get; } = new MicrosoftTokenizerLanguage(ThaiValue); + /// Selects the Microsoft tokenizer for Ukrainian. + public static MicrosoftTokenizerLanguage Ukrainian { get; } = new MicrosoftTokenizerLanguage(UkrainianValue); + /// Selects the Microsoft tokenizer for Urdu. + public static MicrosoftTokenizerLanguage Urdu { get; } = new MicrosoftTokenizerLanguage(UrduValue); + /// Selects the Microsoft tokenizer for Vietnamese. + public static MicrosoftTokenizerLanguage Vietnamese { get; } = new MicrosoftTokenizerLanguage(VietnameseValue); + /// Determines if two values are the same. + public static bool operator ==(MicrosoftTokenizerLanguage left, MicrosoftTokenizerLanguage right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(MicrosoftTokenizerLanguage left, MicrosoftTokenizerLanguage right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator MicrosoftTokenizerLanguage(string value) => new MicrosoftTokenizerLanguage(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is MicrosoftTokenizerLanguage other && Equals(other); + /// + public bool Equals(MicrosoftTokenizerLanguage other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountIdentity.Serialization.cs deleted file mode 100644 index 37599a33fe0e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountIdentity.Serialization.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AIServicesAccountIdentity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Identity != null) - { - writer.WritePropertyName("identity"u8); - writer.WriteObjectValue(Identity); - } - else - { - writer.WriteNull("identity"); - } - writer.WritePropertyName("subdomainUrl"u8); - writer.WriteStringValue(SubdomainUrl); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WriteEndObject(); - } - - internal static AIServicesAccountIdentity DeserializeAIServicesAccountIdentity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SearchIndexerDataIdentity identity = default; - string subdomainUrl = default; - string odataType = default; - string description = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("identity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - identity = null; - continue; - } - identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - if (property.NameEquals("subdomainUrl"u8)) - { - subdomainUrl = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - } - return new AIServicesAccountIdentity(odataType, description, identity, subdomainUrl); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AIServicesAccountIdentity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAIServicesAccountIdentity(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountIdentity.cs deleted file mode 100644 index 21d3f2861259..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountIdentity.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
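
Usage sketch (illustrative, not taken from the SDK sources): the MicrosoftLanguageTokenizer and the two extensible enums above, MicrosoftTokenizerLanguage and MicrosoftStemmingTokenizerLanguage, are consumed through an index's custom-analysis settings. The wiring below assumes the public index types stay exposed through Azure.Search.Documents.Indexes and Azure.Search.Documents.Indexes.Models as in the shipping package; the endpoint, key, and all component names are placeholders.

using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexClient = new SearchIndexClient(
    new Uri("https://<service>.search.windows.net"),
    new AzureKeyCredential("<admin-key>"));

var index = new SearchIndex("hotels-de")
{
    Fields =
    {
        new SimpleField("id", SearchFieldDataType.String) { IsKey = true },
        new SearchableField("description") { AnalyzerName = "german-analyzer" }
    },
    Tokenizers =
    {
        // MicrosoftLanguageStemmingTokenizer takes a MicrosoftStemmingTokenizerLanguage;
        // the non-stemming MicrosoftLanguageTokenizer takes a MicrosoftTokenizerLanguage.
        new MicrosoftLanguageStemmingTokenizer("german-tokenizer")
        {
            Language = MicrosoftStemmingTokenizerLanguage.German,
            MaxTokenLength = 255,      // tokens longer than this are split, per the doc comment above
            IsSearchTokenizer = false  // use as the indexing-time tokenizer
        }
    },
    Analyzers =
    {
        new CustomAnalyzer("german-analyzer", "german-tokenizer")
        {
            TokenFilters = { TokenFilterName.Lowercase }
        }
    }
};

await indexClient.CreateIndexAsync(index);

Because both language types are extensible enums with an implicit conversion from string and case-insensitive equality, assigning the string "german" to Language behaves the same as the strongly typed value.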
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The multi-region account of an Azure AI service resource that's attached to a skillset. - public partial class AIServicesAccountIdentity : CognitiveServicesAccount - { - /// Initializes a new instance of . - /// - /// The user-assigned managed identity used for connections to AI Service. If not specified, the system-assigned managed identity is used. On updates to the skillset, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The subdomain url for the corresponding AI Service. - /// is null. - public AIServicesAccountIdentity(SearchIndexerDataIdentity identity, string subdomainUrl) - { - Argument.AssertNotNull(subdomainUrl, nameof(subdomainUrl)); - - Identity = identity; - SubdomainUrl = subdomainUrl; - ODataType = "#Microsoft.Azure.Search.AIServicesByIdentity"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of Azure AI service resource attached to a skillset. - /// Description of the Azure AI service resource attached to a skillset. - /// - /// The user-assigned managed identity used for connections to AI Service. If not specified, the system-assigned managed identity is used. On updates to the skillset, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The subdomain url for the corresponding AI Service. - internal AIServicesAccountIdentity(string oDataType, string description, SearchIndexerDataIdentity identity, string subdomainUrl) : base(oDataType, description) - { - Identity = identity; - SubdomainUrl = subdomainUrl; - ODataType = oDataType ?? "#Microsoft.Azure.Search.AIServicesByIdentity"; - } - - /// - /// The user-assigned managed identity used for connections to AI Service. If not specified, the system-assigned managed identity is used. On updates to the skillset, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity Identity { get; set; } - /// The subdomain url for the corresponding AI Service. - public string SubdomainUrl { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountKey.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountKey.Serialization.cs deleted file mode 100644 index 75ff48392394..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountKey.Serialization.cs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
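
Usage sketch (illustrative, not taken from the SDK sources): the Models/AIServicesAccountIdentity files deleted here back the skillset option that attaches an Azure AI services multi-region account through a managed identity. A minimal sketch, assuming the public skillset surface keeps its current shape; the identity resource ID and subdomain URL are placeholders.

using System;
using System.Collections.Generic;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexerClient = new SearchIndexerClient(
    new Uri("https://<service>.search.windows.net"),
    new AzureKeyCredential("<admin-key>"));

// The skills themselves are out of scope here; add e.g. an OcrSkill before creating the skillset.
var skills = new List<SearchIndexerSkill>();

var skillset = new SearchIndexerSkillset("hotel-skillset", skills)
{
    // Passing null instead of a user-assigned identity falls back to the search service's
    // system-assigned managed identity, per the doc comments on the deleted model.
    CognitiveServicesAccount = new AIServicesAccountIdentity(
        new SearchIndexerDataUserAssignedIdentity(
            "/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.ManagedIdentity/userAssignedIdentities/<identity>"),
        subdomainUrl: "https://<your-ai-services>.cognitiveservices.azure.com")
};

await indexerClient.CreateSkillsetAsync(skillset);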
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AIServicesAccountKey : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("key"u8); - writer.WriteStringValue(Key); - writer.WritePropertyName("subdomainUrl"u8); - writer.WriteStringValue(SubdomainUrl); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WriteEndObject(); - } - - internal static AIServicesAccountKey DeserializeAIServicesAccountKey(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string key = default; - string subdomainUrl = default; - string odataType = default; - string description = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("key"u8)) - { - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("subdomainUrl"u8)) - { - subdomainUrl = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - } - return new AIServicesAccountKey(odataType, description, key, subdomainUrl); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AIServicesAccountKey FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAIServicesAccountKey(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountKey.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountKey.cs deleted file mode 100644 index 9fc583eb0fd6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesAccountKey.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The account key of an Azure AI service resource that's attached to a skillset, to be used with the resource's subdomain. - public partial class AIServicesAccountKey : CognitiveServicesAccount - { - /// Initializes a new instance of . - /// The key used to provision the Azure AI service resource attached to a skillset. - /// The subdomain url for the corresponding AI Service. - /// or is null. - public AIServicesAccountKey(string key, string subdomainUrl) - { - Argument.AssertNotNull(key, nameof(key)); - Argument.AssertNotNull(subdomainUrl, nameof(subdomainUrl)); - - Key = key; - SubdomainUrl = subdomainUrl; - ODataType = "#Microsoft.Azure.Search.AIServicesByKey"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of Azure AI service resource attached to a skillset. - /// Description of the Azure AI service resource attached to a skillset. 
- /// The key used to provision the Azure AI service resource attached to a skillset. - /// The subdomain url for the corresponding AI Service. - internal AIServicesAccountKey(string oDataType, string description, string key, string subdomainUrl) : base(oDataType, description) - { - Key = key; - SubdomainUrl = subdomainUrl; - ODataType = oDataType ?? "#Microsoft.Azure.Search.AIServicesByKey"; - } - - /// The key used to provision the Azure AI service resource attached to a skillset. - public string Key { get; set; } - /// The subdomain url for the corresponding AI Service. - public string SubdomainUrl { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionParameters.Serialization.cs deleted file mode 100644 index 515a87de17bf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionParameters.Serialization.cs +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AIServicesVisionParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - writer.WritePropertyName("resourceUri"u8); - writer.WriteStringValue(ResourceUri.AbsoluteUri); - if (Optional.IsDefined(ApiKey)) - { - writer.WritePropertyName("apiKey"u8); - writer.WriteStringValue(ApiKey); - } - if (Optional.IsDefined(AuthIdentity)) - { - if (AuthIdentity != null) - { - writer.WritePropertyName("authIdentity"u8); - writer.WriteObjectValue(AuthIdentity); - } - else - { - writer.WriteNull("authIdentity"); - } - } - writer.WriteEndObject(); - } - - internal static AIServicesVisionParameters DeserializeAIServicesVisionParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string modelVersion = default; - Uri resourceUri = default; - string apiKey = default; - SearchIndexerDataIdentity authIdentity = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("resourceUri"u8)) - { - resourceUri = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("apiKey"u8)) - { - apiKey = property.Value.GetString(); - continue; - } - if (property.NameEquals("authIdentity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - authIdentity = null; - continue; - } - authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - } - return new AIServicesVisionParameters(modelVersion, resourceUri, apiKey, authIdentity); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
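
Usage sketch (illustrative, not taken from the SDK sources): AIServicesAccountKey, deleted in this hunk, is the key-based sibling of the identity variant above; it carries an API key plus the resource's subdomain URL. Continuing the previous sketch, with a placeholder key:

// Swap the identity-based attachment for a key-based one on the same skillset.
skillset.CognitiveServicesAccount = new AIServicesAccountKey(
    key: "<ai-services-api-key>",
    subdomainUrl: "https://<your-ai-services>.cognitiveservices.azure.com");

await indexerClient.CreateOrUpdateSkillsetAsync(skillset);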
- internal static AIServicesVisionParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAIServicesVisionParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionParameters.cs deleted file mode 100644 index 0009bf7e1e56..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionParameters.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the AI Services Vision parameters for vectorizing a query image or text. - public partial class AIServicesVisionParameters - { - /// Initializes a new instance of . - /// The version of the model to use when calling the AI Services Vision service. It will default to the latest available when not specified. - /// The resource URI of the AI Services resource. - /// is null. - public AIServicesVisionParameters(string modelVersion, Uri resourceUri) - { - Argument.AssertNotNull(resourceUri, nameof(resourceUri)); - - ModelVersion = modelVersion; - ResourceUri = resourceUri; - } - - /// Initializes a new instance of . - /// The version of the model to use when calling the AI Services Vision service. It will default to the latest available when not specified. - /// The resource URI of the AI Services resource. - /// API key of the designated AI Services resource. - /// - /// The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the index, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - internal AIServicesVisionParameters(string modelVersion, Uri resourceUri, string apiKey, SearchIndexerDataIdentity authIdentity) - { - ModelVersion = modelVersion; - ResourceUri = resourceUri; - ApiKey = apiKey; - AuthIdentity = authIdentity; - } - - /// The version of the model to use when calling the AI Services Vision service. It will default to the latest available when not specified. - public string ModelVersion { get; set; } - /// The resource URI of the AI Services resource. - public Uri ResourceUri { get; set; } - /// API key of the designated AI Services resource. - public string ApiKey { get; set; } - /// - /// The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the index, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity AuthIdentity { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionVectorizer.Serialization.cs deleted file mode 100644 index ebbcd766c121..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionVectorizer.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AIServicesVisionVectorizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(AIServicesVisionParameters)) - { - writer.WritePropertyName("aiServicesVisionParameters"u8); - writer.WriteObjectValue(AIServicesVisionParameters); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(VectorizerName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static AIServicesVisionVectorizer DeserializeAIServicesVisionVectorizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - AIServicesVisionParameters aiServicesVisionParameters = default; - string name = default; - VectorSearchVectorizerKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("aiServicesVisionParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - aiServicesVisionParameters = AIServicesVisionParameters.DeserializeAIServicesVisionParameters(property.Value); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchVectorizerKind(property.Value.GetString()); - continue; - } - } - return new AIServicesVisionVectorizer(name, kind, aiServicesVisionParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AIServicesVisionVectorizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAIServicesVisionVectorizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionVectorizer.cs deleted file mode 100644 index 8503b68288d0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AIServicesVisionVectorizer.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
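
Usage sketch (illustrative, not taken from the SDK sources): AIServicesVisionParameters (deleted above) and AIServicesVisionVectorizer (deleted in the surrounding hunks) configure query-time vectorization of images or text against an Azure AI Vision resource. A hedged sketch of plugging one into an index's vector search configuration; the resource URI and key are placeholders, and wiring the vectorizer into a vector profile is left out.

using System;
using Azure.Search.Documents.Indexes.Models;

var index = new SearchIndex("product-images")
{
    VectorSearch = new VectorSearch
    {
        Algorithms = { new HnswAlgorithmConfiguration("hnsw") },
        Vectorizers =
        {
            new AIServicesVisionVectorizer("vision-vectorizer")
            {
                AIServicesVisionParameters = new AIServicesVisionParameters(
                    modelVersion: null, // null lets the service use the latest available model version
                    resourceUri: new Uri("https://<your-ai-services>.cognitiveservices.azure.com"))
                {
                    // Alternatively set AuthIdentity to a user-assigned identity, or leave both
                    // unset to use the system-assigned managed identity, per the deleted doc comments.
                    ApiKey = "<ai-services-api-key>"
                }
            }
        }
    }
};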
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the AI Services Vision parameters for vectorizing a query image or text. - public partial class AIServicesVisionVectorizer : VectorSearchVectorizer - { - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// is null. - public AIServicesVisionVectorizer(string vectorizerName) : base(vectorizerName) - { - Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); - - Kind = VectorSearchVectorizerKind.AIServicesVision; - } - - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// The name of the kind of vectorization method being configured for use with vector search. - /// Contains the parameters specific to AI Services Vision embedding vectorization. - internal AIServicesVisionVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, AIServicesVisionParameters aiServicesVisionParameters) : base(vectorizerName, kind) - { - AIServicesVisionParameters = aiServicesVisionParameters; - Kind = kind; - } - - /// Contains the parameters specific to AI Services Vision embedding vectorization. - public AIServicesVisionParameters AIServicesVisionParameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeResult.Serialization.cs deleted file mode 100644 index a6eee5e5be98..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class AnalyzeResult - { - internal static AnalyzeResult DeserializeAnalyzeResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList tokens = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("tokens"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(AnalyzedTokenInfo.DeserializeAnalyzedTokenInfo(item)); - } - tokens = array; - continue; - } - } - return new AnalyzeResult(tokens); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static AnalyzeResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAnalyzeResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeResult.cs deleted file mode 100644 index cafb6703ca67..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeResult.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The result of testing an analyzer on text. 
- internal partial class AnalyzeResult - { - /// Initializes a new instance of . - /// The list of tokens returned by the analyzer specified in the request. - /// is null. - internal AnalyzeResult(IEnumerable tokens) - { - Argument.AssertNotNull(tokens, nameof(tokens)); - - Tokens = tokens.ToList(); - } - - /// Initializes a new instance of . - /// The list of tokens returned by the analyzer specified in the request. - internal AnalyzeResult(IReadOnlyList tokens) - { - Tokens = tokens; - } - - /// The list of tokens returned by the analyzer specified in the request. - public IReadOnlyList Tokens { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeTextOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeTextOptions.Serialization.cs deleted file mode 100644 index f3416858af7f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeTextOptions.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AnalyzeTextOptions : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("text"u8); - writer.WriteStringValue(Text); - if (Optional.IsDefined(AnalyzerName)) - { - writer.WritePropertyName("analyzer"u8); - writer.WriteStringValue(AnalyzerName.Value.ToString()); - } - if (Optional.IsDefined(TokenizerName)) - { - writer.WritePropertyName("tokenizer"u8); - writer.WriteStringValue(TokenizerName.Value.ToString()); - } - if (Optional.IsDefined(NormalizerName)) - { - writer.WritePropertyName("normalizer"u8); - writer.WriteStringValue(NormalizerName.Value.ToString()); - } - if (Optional.IsCollectionDefined(TokenFilters)) - { - writer.WritePropertyName("tokenFilters"u8); - writer.WriteStartArray(); - foreach (var item in TokenFilters) - { - writer.WriteStringValue(item.ToString()); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(CharFilters)) - { - writer.WritePropertyName("charFilters"u8); - writer.WriteStartArray(); - foreach (var item in CharFilters) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeTextOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeTextOptions.cs deleted file mode 100644 index 64c414f34db5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzeTextOptions.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies some text and analysis components used to break that text into tokens. - public partial class AnalyzeTextOptions - { - /// Initializes a new instance of . - /// The text to break into tokens. - /// is null. 
- public AnalyzeTextOptions(string text) - { - Argument.AssertNotNull(text, nameof(text)); - - Text = text; - TokenFilters = new ChangeTrackingList(); - CharFilters = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The text to break into tokens. - /// The name of the analyzer to use to break the given text. If this parameter is not specified, you must specify a tokenizer instead. The tokenizer and analyzer parameters are mutually exclusive. - /// The name of the tokenizer to use to break the given text. If this parameter is not specified, you must specify an analyzer instead. The tokenizer and analyzer parameters are mutually exclusive. - /// The name of the normalizer to use to normalize the given text. - /// An optional list of token filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. - /// An optional list of character filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. - internal AnalyzeTextOptions(string text, LexicalAnalyzerName? analyzerName, LexicalTokenizerName? tokenizerName, LexicalNormalizerName? normalizerName, IList tokenFilters, IList charFilters) - { - Text = text; - AnalyzerName = analyzerName; - TokenizerName = tokenizerName; - NormalizerName = normalizerName; - TokenFilters = tokenFilters; - CharFilters = charFilters; - } - - /// The text to break into tokens. - public string Text { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzedTokenInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzedTokenInfo.Serialization.cs deleted file mode 100644 index da83fa8ffcd5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzedTokenInfo.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AnalyzedTokenInfo - { - internal static AnalyzedTokenInfo DeserializeAnalyzedTokenInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string token = default; - int startOffset = default; - int endOffset = default; - int position = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("token"u8)) - { - token = property.Value.GetString(); - continue; - } - if (property.NameEquals("startOffset"u8)) - { - startOffset = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("endOffset"u8)) - { - endOffset = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("position"u8)) - { - position = property.Value.GetInt32(); - continue; - } - } - return new AnalyzedTokenInfo(token, startOffset, endOffset, position); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
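
Usage sketch (illustrative, not taken from the SDK sources): AnalyzeTextOptions, AnalyzeResult, and AnalyzedTokenInfo back the index "analyze" API, which shows how a given analyzer or tokenizer breaks text into tokens. The sketch assumes the existing SearchIndexClient.AnalyzeTextAsync surface and the convenience constructor from the hand-written partial that takes an analyzer name; endpoint, key, and index name are placeholders.

using System;
using System.Collections.Generic;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexClient = new SearchIndexClient(
    new Uri("https://<service>.search.windows.net"),
    new AzureKeyCredential("<admin-key>"));

// Ask the service how the en.microsoft analyzer tokenizes a sample string.
var options = new AnalyzeTextOptions("The quick brown fox", LexicalAnalyzerName.EnMicrosoft);

Response<IReadOnlyList<AnalyzedTokenInfo>> result =
    await indexClient.AnalyzeTextAsync("hotels-de", options);

foreach (AnalyzedTokenInfo token in result.Value)
{
    Console.WriteLine($"{token.Token} [{token.StartOffset}..{token.EndOffset}] position {token.Position}");
}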
- internal static AnalyzedTokenInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAnalyzedTokenInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzedTokenInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzedTokenInfo.cs deleted file mode 100644 index 21bda20820ff..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AnalyzedTokenInfo.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Information about a token returned by an analyzer. - public partial class AnalyzedTokenInfo - { - /// Initializes a new instance of . - /// The token returned by the analyzer. - /// The index of the first character of the token in the input text. - /// The index of the last character of the token in the input text. - /// The position of the token in the input text relative to other tokens. The first token in the input text has position 0, the next has position 1, and so on. Depending on the analyzer used, some tokens might have the same position, for example if they are synonyms of each other. - internal AnalyzedTokenInfo(string token, int startOffset, int endOffset, int position) - { - Token = token; - StartOffset = startOffset; - EndOffset = endOffset; - Position = position; - } - - /// The token returned by the analyzer. - public string Token { get; } - /// The index of the first character of the token in the input text. - public int StartOffset { get; } - /// The index of the last character of the token in the input text. - public int EndOffset { get; } - /// The position of the token in the input text relative to other tokens. The first token in the input text has position 0, the next has position 1, and so on. Depending on the analyzer used, some tokens might have the same position, for example if they are synonyms of each other. - public int Position { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AsciiFoldingTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AsciiFoldingTokenFilter.Serialization.cs deleted file mode 100644 index a344d6bff209..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AsciiFoldingTokenFilter.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AsciiFoldingTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(PreserveOriginal)) - { - writer.WritePropertyName("preserveOriginal"u8); - writer.WriteBooleanValue(PreserveOriginal.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static AsciiFoldingTokenFilter DeserializeAsciiFoldingTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - bool? 
preserveOriginal = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("preserveOriginal"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - preserveOriginal = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new AsciiFoldingTokenFilter(odataType, name, preserveOriginal); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AsciiFoldingTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAsciiFoldingTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AsciiFoldingTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AsciiFoldingTokenFilter.cs deleted file mode 100644 index eba18808c0d6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AsciiFoldingTokenFilter.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. This token filter is implemented using Apache Lucene. - public partial class AsciiFoldingTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public AsciiFoldingTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.AsciiFoldingTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A value indicating whether the original token will be kept. Default is false. - internal AsciiFoldingTokenFilter(string oDataType, string name, bool? preserveOriginal) : base(oDataType, name) - { - PreserveOriginal = preserveOriginal; - ODataType = oDataType ?? "#Microsoft.Azure.Search.AsciiFoldingTokenFilter"; - } - - /// A value indicating whether the original token will be kept. Default is false. - public bool? 
PreserveOriginal { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteItem.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteItem.Serialization.cs deleted file mode 100644 index ad6f5504f2ba..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteItem.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class AutocompleteItem - { - internal static AutocompleteItem DeserializeAutocompleteItem(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string text = default; - string queryPlusText = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("text"u8)) - { - text = property.Value.GetString(); - continue; - } - if (property.NameEquals("queryPlusText"u8)) - { - queryPlusText = property.Value.GetString(); - continue; - } - } - return new AutocompleteItem(text, queryPlusText); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static AutocompleteItem FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAutocompleteItem(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteItem.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteItem.cs deleted file mode 100644 index 570232f508bd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteItem.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The result of Autocomplete requests. - public partial class AutocompleteItem - { - /// Initializes a new instance of . - /// The completed term. - /// The query along with the completed term. - internal AutocompleteItem(string text, string queryPlusText) - { - Text = text; - QueryPlusText = queryPlusText; - } - - /// The completed term. - public string Text { get; } - /// The query along with the completed term. - public string QueryPlusText { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteMode.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteMode.Serialization.cs deleted file mode 100644 index 114f3dc11bdf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteMode.Serialization.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
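
Usage sketch (illustrative, not taken from the SDK sources): the AsciiFoldingTokenFilter removed just above folds non-ASCII Latin characters to their ASCII equivalents. A small sketch of declaring one with PreserveOriginal and referencing it from a custom analyzer; all names are placeholders.

using Azure.Search.Documents.Indexes.Models;

var index = new SearchIndex("catalog")
{
    TokenFilters =
    {
        // Keep both "über" and the folded "uber" in the token stream.
        new AsciiFoldingTokenFilter("ascii-folding") { PreserveOriginal = true }
    },
    Analyzers =
    {
        new CustomAnalyzer("folded-standard", LexicalTokenizerName.Standard)
        {
            TokenFilters = { TokenFilterName.Lowercase, "ascii-folding" }
        }
    }
};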
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Models -{ - internal static partial class AutocompleteModeExtensions - { - public static string ToSerialString(this AutocompleteMode value) => value switch - { - AutocompleteMode.OneTerm => "oneTerm", - AutocompleteMode.TwoTerms => "twoTerms", - AutocompleteMode.OneTermWithContext => "oneTermWithContext", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AutocompleteMode value.") - }; - - public static AutocompleteMode ToAutocompleteMode(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "oneTerm")) return AutocompleteMode.OneTerm; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "twoTerms")) return AutocompleteMode.TwoTerms; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "oneTermWithContext")) return AutocompleteMode.OneTermWithContext; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown AutocompleteMode value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteMode.cs deleted file mode 100644 index 8671719ad663..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteMode.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' to get shingles and 'oneTermWithContext' to use the current context in producing autocomplete terms. - public enum AutocompleteMode - { - /// Only one term is suggested. If the query has two terms, only the last term is completed. For example, if the input is 'washington medic', the suggested terms could include 'medicaid', 'medicare', and 'medicine'. - OneTerm, - /// Matching two-term phrases in the index will be suggested. For example, if the input is 'medic', the suggested terms could include 'medicare coverage' and 'medical assistant'. - TwoTerms, - /// Completes the last term in a query with two or more terms, where the last two terms are a phrase that exists in the index. For example, if the input is 'washington medic', the suggested terms could include 'washington medicaid' and 'washington medical'. - OneTermWithContext - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteOptions.Serialization.cs deleted file mode 100644 index d08830ff9ede..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteOptions.Serialization.cs +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
-
-// <auto-generated/>
-
-#nullable disable
-
-using System.Text.Json;
-using Azure.Core;
-using Azure.Search.Documents.Models;
-
-namespace Azure.Search.Documents
-{
-    public partial class AutocompleteOptions : IUtf8JsonSerializable
-    {
-        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
-        {
-            writer.WriteStartObject();
-            writer.WritePropertyName("search"u8);
-            writer.WriteStringValue(SearchText);
-            if (Optional.IsDefined(Mode))
-            {
-                writer.WritePropertyName("autocompleteMode"u8);
-                writer.WriteStringValue(Mode.Value.ToSerialString());
-            }
-            if (Optional.IsDefined(Filter))
-            {
-                writer.WritePropertyName("filter"u8);
-                writer.WriteStringValue(Filter);
-            }
-            if (Optional.IsDefined(UseFuzzyMatching))
-            {
-                writer.WritePropertyName("fuzzy"u8);
-                writer.WriteBooleanValue(UseFuzzyMatching.Value);
-            }
-            if (Optional.IsDefined(HighlightPostTag))
-            {
-                writer.WritePropertyName("highlightPostTag"u8);
-                writer.WriteStringValue(HighlightPostTag);
-            }
-            if (Optional.IsDefined(HighlightPreTag))
-            {
-                writer.WritePropertyName("highlightPreTag"u8);
-                writer.WriteStringValue(HighlightPreTag);
-            }
-            if (Optional.IsDefined(MinimumCoverage))
-            {
-                writer.WritePropertyName("minimumCoverage"u8);
-                writer.WriteNumberValue(MinimumCoverage.Value);
-            }
-            if (Optional.IsDefined(SearchFieldsRaw))
-            {
-                writer.WritePropertyName("searchFields"u8);
-                writer.WriteStringValue(SearchFieldsRaw);
-            }
-            writer.WritePropertyName("suggesterName"u8);
-            writer.WriteStringValue(SuggesterName);
-            if (Optional.IsDefined(Size))
-            {
-                writer.WritePropertyName("top"u8);
-                writer.WriteNumberValue(Size.Value);
-            }
-            writer.WriteEndObject();
-        }
-
-        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
-        internal virtual RequestContent ToRequestContent()
-        {
-            var content = new Utf8JsonRequestContent();
-            content.JsonWriter.WriteObjectValue(this);
-            return content;
-        }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteOptions.cs
deleted file mode 100644
index 094faba55211..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteOptions.cs
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System;
-using Azure.Search.Documents.Models;
-
-namespace Azure.Search.Documents
-{
-    /// <summary> Parameters for fuzzy matching, and other autocomplete query behaviors. </summary>
-    public partial class AutocompleteOptions
-    {
-        /// <summary> Initializes a new instance of <see cref="AutocompleteOptions"/>. </summary>
-        /// <param name="searchText"> The search text on which to base autocomplete results. </param>
-        /// <param name="mode"> Specifies the mode for Autocomplete. The default is 'oneTerm'. Use 'twoTerms' to get shingles and 'oneTermWithContext' to use the current context while producing auto-completed terms. </param>
-        /// <param name="filter"> An OData expression that filters the documents used to produce completed terms for the Autocomplete result. </param>
-        /// <param name="useFuzzyMatching"> A value indicating whether to use fuzzy matching for the autocomplete query. Default is false. When set to true, the query will autocomplete terms even if there's a substituted or missing character in the search text. While this provides a better experience in some scenarios, it comes at a performance cost as fuzzy autocomplete queries are slower and consume more resources. </param>
-        /// <param name="highlightPostTag"> A string tag that is appended to hit highlights. Must be set with highlightPreTag. If omitted, hit highlighting is disabled. </param>
-        /// <param name="highlightPreTag"> A string tag that is prepended to hit highlights. Must be set with highlightPostTag. If omitted, hit highlighting is disabled. </param>
-        /// <param name="minimumCoverage"> A number between 0 and 100 indicating the percentage of the index that must be covered by an autocomplete query in order for the query to be reported as a success. This parameter can be useful for ensuring search availability even for services with only one replica. The default is 80. </param>
-        /// <param name="searchFieldsRaw"> The comma-separated list of field names to consider when querying for auto-completed terms. Target fields must be included in the specified suggester. </param>
-        /// <param name="suggesterName"> The name of the suggester as specified in the suggesters collection that's part of the index definition. </param>
-        /// <param name="size"> The number of auto-completed terms to retrieve. This must be a value between 1 and 100. The default is 5. </param>
-        internal AutocompleteOptions(string searchText, AutocompleteMode? mode, string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, string searchFieldsRaw, string suggesterName, int? size)
-        {
-            SearchText = searchText;
-            Mode = mode;
-            Filter = filter;
-            UseFuzzyMatching = useFuzzyMatching;
-            HighlightPostTag = highlightPostTag;
-            HighlightPreTag = highlightPreTag;
-            MinimumCoverage = minimumCoverage;
-            SearchFieldsRaw = searchFieldsRaw;
-            SuggesterName = suggesterName;
-            Size = size;
-        }
-        /// <summary> A value indicating whether to use fuzzy matching for the autocomplete query. Default is false. When set to true, the query will autocomplete terms even if there's a substituted or missing character in the search text. While this provides a better experience in some scenarios, it comes at a performance cost as fuzzy autocomplete queries are slower and consume more resources. </summary>
-        public bool? UseFuzzyMatching { get; set; }
-        /// <summary> A string tag that is appended to hit highlights. Must be set with highlightPreTag. If omitted, hit highlighting is disabled. </summary>
-        public string HighlightPostTag { get; set; }
-        /// <summary> A string tag that is prepended to hit highlights. Must be set with highlightPostTag. If omitted, hit highlighting is disabled. </summary>
-        public string HighlightPreTag { get; set; }
-        /// <summary> A number between 0 and 100 indicating the percentage of the index that must be covered by an autocomplete query in order for the query to be reported as a success. This parameter can be useful for ensuring search availability even for services with only one replica. The default is 80. </summary>
-        public double? MinimumCoverage { get; set; }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteResults.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteResults.Serialization.cs
deleted file mode 100644
index ebd61028ccee..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteResults.Serialization.cs
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System.Collections.Generic;
-using System.Text.Json;
-
-namespace Azure.Search.Documents.Models
-{
-    public partial class AutocompleteResults
-    {
-        internal static AutocompleteResults DeserializeAutocompleteResults(JsonElement element)
-        {
-            if (element.ValueKind == JsonValueKind.Null)
-            {
-                return null;
-            }
-            double? searchCoverage = default;
-            IReadOnlyList<AutocompleteItem> value = default;
-            foreach (var property in element.EnumerateObject())
-            {
-                if (property.NameEquals("@search.coverage"u8))
-                {
-                    if (property.Value.ValueKind == JsonValueKind.Null)
-                    {
-                        continue;
-                    }
-                    searchCoverage = property.Value.GetDouble();
-                    continue;
-                }
-                if (property.NameEquals("value"u8))
-                {
-                    List<AutocompleteItem> array = new List<AutocompleteItem>();
-                    foreach (var item in property.Value.EnumerateArray())
-                    {
-                        array.Add(AutocompleteItem.DeserializeAutocompleteItem(item));
-                    }
-                    value = array;
-                    continue;
-                }
-            }
-            return new AutocompleteResults(searchCoverage, value);
-        }
-
-        /// <summary> Deserializes the model from a raw response. </summary>
-        /// <param name="response"> The response to deserialize the model from. </param>
-        internal static AutocompleteResults FromResponse(Response response)
-        {
-            using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions);
-            return DeserializeAutocompleteResults(document.RootElement);
-        }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteResults.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteResults.cs
deleted file mode 100644
index ec145b56ea09..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AutocompleteResults.cs
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Azure.Search.Documents.Models
-{
-    /// <summary> The result of Autocomplete query. </summary>
-    public partial class AutocompleteResults
-    {
-        /// <summary> Initializes a new instance of <see cref="AutocompleteResults"/>. </summary>
-        /// <param name="results"> The list of returned Autocompleted items. </param>
-        internal AutocompleteResults(IEnumerable<AutocompleteItem> results)
-        {
-            Results = results.ToList();
-        }
-
-        /// <summary> Initializes a new instance of <see cref="AutocompleteResults"/>. </summary>
-        /// <param name="coverage"> A value indicating the percentage of the index that was considered by the autocomplete request, or null if minimumCoverage was not specified in the request. </param>
-        /// <param name="results"> The list of returned Autocompleted items. </param>
-        internal AutocompleteResults(double? coverage, IReadOnlyList<AutocompleteItem> results)
-        {
-            Coverage = coverage;
-            Results = results;
-        }
-
-        /// <summary> A value indicating the percentage of the index that was considered by the autocomplete request, or null if minimumCoverage was not specified in the request. </summary>
-        public double? Coverage { get; }
-        /// <summary> The list of returned Autocompleted items. </summary>
-        public IReadOnlyList<AutocompleteItem> Results { get; }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureActiveDirectoryApplicationCredentials.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureActiveDirectoryApplicationCredentials.Serialization.cs
deleted file mode 100644
index 48d289ea39d2..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureActiveDirectoryApplicationCredentials.Serialization.cs
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class AzureActiveDirectoryApplicationCredentials : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("applicationId"u8); - writer.WriteStringValue(ApplicationId); - if (Optional.IsDefined(ApplicationSecret)) - { - writer.WritePropertyName("applicationSecret"u8); - writer.WriteStringValue(ApplicationSecret); - } - writer.WriteEndObject(); - } - - internal static AzureActiveDirectoryApplicationCredentials DeserializeAzureActiveDirectoryApplicationCredentials(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string applicationId = default; - string applicationSecret = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("applicationId"u8)) - { - applicationId = property.Value.GetString(); - continue; - } - if (property.NameEquals("applicationSecret"u8)) - { - applicationSecret = property.Value.GetString(); - continue; - } - } - return new AzureActiveDirectoryApplicationCredentials(applicationId, applicationSecret); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static AzureActiveDirectoryApplicationCredentials FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureActiveDirectoryApplicationCredentials(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureActiveDirectoryApplicationCredentials.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureActiveDirectoryApplicationCredentials.cs deleted file mode 100644 index 51c88660d511..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureActiveDirectoryApplicationCredentials.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Credentials of a registered application created for your search service, used for authenticated access to the encryption keys stored in Azure Key Vault. - internal partial class AzureActiveDirectoryApplicationCredentials - { - /// Initializes a new instance of . - /// An AAD Application ID that was granted the required access permissions to the Azure Key Vault that is to be used when encrypting your data at rest. The Application ID should not be confused with the Object ID for your AAD Application. - /// is null. - public AzureActiveDirectoryApplicationCredentials(string applicationId) - { - Argument.AssertNotNull(applicationId, nameof(applicationId)); - - ApplicationId = applicationId; - } - - /// Initializes a new instance of . - /// An AAD Application ID that was granted the required access permissions to the Azure Key Vault that is to be used when encrypting your data at rest. The Application ID should not be confused with the Object ID for your AAD Application. - /// The authentication key of the specified AAD application. 
- internal AzureActiveDirectoryApplicationCredentials(string applicationId, string applicationSecret) - { - ApplicationId = applicationId; - ApplicationSecret = applicationSecret; - } - - /// An AAD Application ID that was granted the required access permissions to the Azure Key Vault that is to be used when encrypting your data at rest. The Application ID should not be confused with the Object ID for your AAD Application. - public string ApplicationId { get; set; } - /// The authentication key of the specified AAD application. - public string ApplicationSecret { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningParameters.Serialization.cs deleted file mode 100644 index 1510e243157a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningParameters.Serialization.cs +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AzureMachineLearningParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (ScoringUri != null) - { - writer.WritePropertyName("uri"u8); - writer.WriteStringValue(ScoringUri.AbsoluteUri); - } - else - { - writer.WriteNull("uri"); - } - if (Optional.IsDefined(AuthenticationKey)) - { - if (AuthenticationKey != null) - { - writer.WritePropertyName("key"u8); - writer.WriteStringValue(AuthenticationKey); - } - else - { - writer.WriteNull("key"); - } - } - if (Optional.IsDefined(ResourceId)) - { - if (ResourceId != null) - { - writer.WritePropertyName("resourceId"u8); - writer.WriteStringValue(ResourceId); - } - else - { - writer.WriteNull("resourceId"); - } - } - if (Optional.IsDefined(Timeout)) - { - if (Timeout != null) - { - writer.WritePropertyName("timeout"u8); - writer.WriteStringValue(Timeout.Value, "P"); - } - else - { - writer.WriteNull("timeout"); - } - } - if (Optional.IsDefined(Region)) - { - if (Region != null) - { - writer.WritePropertyName("region"u8); - writer.WriteStringValue(Region); - } - else - { - writer.WriteNull("region"); - } - } - if (Optional.IsDefined(ModelName)) - { - writer.WritePropertyName("modelName"u8); - writer.WriteStringValue(ModelName.Value.ToString()); - } - writer.WriteEndObject(); - } - - internal static AzureMachineLearningParameters DeserializeAzureMachineLearningParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - Uri uri = default; - string key = default; - string resourceId = default; - TimeSpan? timeout = default; - string region = default; - AIFoundryModelCatalogName? 
modelName = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("uri"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - uri = null; - continue; - } - uri = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("key"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - key = null; - continue; - } - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("resourceId"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - resourceId = null; - continue; - } - resourceId = property.Value.GetString(); - continue; - } - if (property.NameEquals("timeout"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - timeout = null; - continue; - } - timeout = property.Value.GetTimeSpan("P"); - continue; - } - if (property.NameEquals("region"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - region = null; - continue; - } - region = property.Value.GetString(); - continue; - } - if (property.NameEquals("modelName"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - modelName = new AIFoundryModelCatalogName(property.Value.GetString()); - continue; - } - } - return new AzureMachineLearningParameters( - uri, - key, - resourceId, - timeout, - region, - modelName); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static AzureMachineLearningParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureMachineLearningParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningParameters.cs deleted file mode 100644 index 933ce5586932..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningParameters.cs +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the properties for connecting to an AML vectorizer. - public partial class AzureMachineLearningParameters - { - /// Initializes a new instance of . - /// (Required for no authentication or key authentication) The scoring URI of the AML service to which the JSON payload will be sent. Only the https URI scheme is allowed. - public AzureMachineLearningParameters(Uri scoringUri) - { - ScoringUri = scoringUri; - } - - /// Initializes a new instance of . - /// (Required for no authentication or key authentication) The scoring URI of the AML service to which the JSON payload will be sent. Only the https URI scheme is allowed. - /// (Required for key authentication) The key for the AML service. - /// (Required for token authentication). The Azure Resource Manager resource ID of the AML service. It should be in the format subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. 
- /// (Optional) When specified, indicates the timeout for the http client making the API call. - /// (Optional for token authentication). The region the AML service is deployed in. - /// The name of the embedding model from the Azure AI Studio Catalog that is deployed at the provided endpoint. - internal AzureMachineLearningParameters(Uri scoringUri, string authenticationKey, string resourceId, TimeSpan? timeout, string region, AIFoundryModelCatalogName? modelName) - { - ScoringUri = scoringUri; - AuthenticationKey = authenticationKey; - ResourceId = resourceId; - Timeout = timeout; - Region = region; - ModelName = modelName; - } - - /// (Required for no authentication or key authentication) The scoring URI of the AML service to which the JSON payload will be sent. Only the https URI scheme is allowed. - public Uri ScoringUri { get; set; } - /// (Required for key authentication) The key for the AML service. - public string AuthenticationKey { get; set; } - /// (Required for token authentication). The Azure Resource Manager resource ID of the AML service. It should be in the format subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. - public string ResourceId { get; set; } - /// (Optional) When specified, indicates the timeout for the http client making the API call. - public TimeSpan? Timeout { get; set; } - /// (Optional for token authentication). The region the AML service is deployed in. - public string Region { get; set; } - /// The name of the embedding model from the Azure AI Studio Catalog that is deployed at the provided endpoint. - public AIFoundryModelCatalogName? ModelName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningSkill.Serialization.cs deleted file mode 100644 index 0de76f2f4ba9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningSkill.Serialization.cs +++ /dev/null @@ -1,278 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AzureMachineLearningSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ScoringUri)) - { - if (ScoringUri != null) - { - writer.WritePropertyName("uri"u8); - writer.WriteStringValue(ScoringUri.AbsoluteUri); - } - else - { - writer.WriteNull("uri"); - } - } - if (Optional.IsDefined(AuthenticationKey)) - { - if (AuthenticationKey != null) - { - writer.WritePropertyName("key"u8); - writer.WriteStringValue(AuthenticationKey); - } - else - { - writer.WriteNull("key"); - } - } - if (Optional.IsDefined(RawResourceId)) - { - if (RawResourceId != null) - { - writer.WritePropertyName("resourceId"u8); - writer.WriteStringValue(RawResourceId); - } - else - { - writer.WriteNull("resourceId"); - } - } - if (Optional.IsDefined(Timeout)) - { - if (Timeout != null) - { - writer.WritePropertyName("timeout"u8); - writer.WriteStringValue(Timeout.Value, "P"); - } - else - { - writer.WriteNull("timeout"); - } - } - if (Optional.IsDefined(RawLocation)) - { - if (RawLocation != null) - { - writer.WritePropertyName("region"u8); - writer.WriteStringValue(RawLocation); - } - else - { - writer.WriteNull("region"); - } - } - if (Optional.IsDefined(DegreeOfParallelism)) - { - if (DegreeOfParallelism != null) - { - writer.WritePropertyName("degreeOfParallelism"u8); - writer.WriteNumberValue(DegreeOfParallelism.Value); - } - else - { - writer.WriteNull("degreeOfParallelism"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static AzureMachineLearningSkill DeserializeAzureMachineLearningSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - Uri uri = default; - string key = default; - string resourceId = default; - TimeSpan? timeout = default; - string region = default; - int? 
degreeOfParallelism = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("uri"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - uri = null; - continue; - } - uri = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("key"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - key = null; - continue; - } - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("resourceId"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - resourceId = null; - continue; - } - resourceId = property.Value.GetString(); - continue; - } - if (property.NameEquals("timeout"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - timeout = null; - continue; - } - timeout = property.Value.GetTimeSpan("P"); - continue; - } - if (property.NameEquals("region"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - region = null; - continue; - } - region = property.Value.GetString(); - continue; - } - if (property.NameEquals("degreeOfParallelism"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - degreeOfParallelism = null; - continue; - } - degreeOfParallelism = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new AzureMachineLearningSkill( - odataType, - name, - description, - context, - inputs, - outputs, - uri, - key, - resourceId, - timeout, - region, - degreeOfParallelism); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AzureMachineLearningSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureMachineLearningSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningSkill.cs deleted file mode 100644 index a8485c08910d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningSkill.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The AML skill allows you to extend AI enrichment with a custom Azure Machine Learning (AML) model. Once an AML model is trained and deployed, an AML skill integrates it into AI enrichment. - public partial class AzureMachineLearningSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// (Required for no authentication or key authentication) The scoring URI of the AML service to which the JSON payload will be sent. Only the https URI scheme is allowed. - /// (Required for key authentication) The key for the AML service. - /// (Required for token authentication). The Azure Resource Manager resource ID of the AML service. It should be in the format subscriptions/{guid}/resourceGroups/{resource-group-name}/Microsoft.MachineLearningServices/workspaces/{workspace-name}/services/{service_name}. - /// (Optional) When specified, indicates the timeout for the http client making the API call. - /// (Optional for token authentication). The region the AML service is deployed in. - /// (Optional) When specified, indicates the number of calls the indexer will make in parallel to the endpoint you have provided. You can decrease this value if your endpoint is failing under too high of a request load, or raise it if your endpoint is able to accept more requests and you would like an increase in the performance of the indexer. If not set, a default value of 5 is used. The degreeOfParallelism can be set to a maximum of 10 and a minimum of 1. - internal AzureMachineLearningSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, Uri scoringUri, string authenticationKey, string rawResourceId, TimeSpan? timeout, string rawLocation, int? degreeOfParallelism) : base(oDataType, name, description, context, inputs, outputs) - { - ScoringUri = scoringUri; - AuthenticationKey = authenticationKey; - RawResourceId = rawResourceId; - Timeout = timeout; - RawLocation = rawLocation; - DegreeOfParallelism = degreeOfParallelism; - ODataType = oDataType ?? "#Microsoft.Skills.Custom.AmlSkill"; - } - /// (Optional) When specified, indicates the timeout for the http client making the API call. - public TimeSpan? Timeout { get; set; } - /// (Optional) When specified, indicates the number of calls the indexer will make in parallel to the endpoint you have provided. You can decrease this value if your endpoint is failing under too high of a request load, or raise it if your endpoint is able to accept more requests and you would like an increase in the performance of the indexer. If not set, a default value of 5 is used. 
The degreeOfParallelism can be set to a maximum of 10 and a minimum of 1. - public int? DegreeOfParallelism { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningVectorizer.Serialization.cs deleted file mode 100644 index c9a2241a090c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningVectorizer.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AzureMachineLearningVectorizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(AMLParameters)) - { - writer.WritePropertyName("amlParameters"u8); - writer.WriteObjectValue(AMLParameters); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(VectorizerName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static AzureMachineLearningVectorizer DeserializeAzureMachineLearningVectorizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - AzureMachineLearningParameters amlParameters = default; - string name = default; - VectorSearchVectorizerKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("amlParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - amlParameters = AzureMachineLearningParameters.DeserializeAzureMachineLearningParameters(property.Value); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchVectorizerKind(property.Value.GetString()); - continue; - } - } - return new AzureMachineLearningVectorizer(name, kind, amlParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AzureMachineLearningVectorizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureMachineLearningVectorizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningVectorizer.cs deleted file mode 100644 index 7aef8e982c11..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureMachineLearningVectorizer.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies an Azure Machine Learning endpoint deployed via the Azure AI Studio Model Catalog for generating the vector embedding of a query string. 
- public partial class AzureMachineLearningVectorizer : VectorSearchVectorizer - { - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// is null. - public AzureMachineLearningVectorizer(string vectorizerName) : base(vectorizerName) - { - Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); - - Kind = VectorSearchVectorizerKind.AML; - } - - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// The name of the kind of vectorization method being configured for use with vector search. - /// Specifies the properties of the AML vectorizer. - internal AzureMachineLearningVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, AzureMachineLearningParameters amlParameters) : base(vectorizerName, kind) - { - AMLParameters = amlParameters; - Kind = kind; - } - - /// Specifies the properties of the AML vectorizer. - public AzureMachineLearningParameters AMLParameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIEmbeddingSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIEmbeddingSkill.Serialization.cs deleted file mode 100644 index 61e34001205e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIEmbeddingSkill.Serialization.cs +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AzureOpenAIEmbeddingSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Dimensions)) - { - if (Dimensions != null) - { - writer.WritePropertyName("dimensions"u8); - writer.WriteNumberValue(Dimensions.Value); - } - else - { - writer.WriteNull("dimensions"); - } - } - if (Optional.IsDefined(ResourceUri)) - { - writer.WritePropertyName("resourceUri"u8); - writer.WriteStringValue(ResourceUri.AbsoluteUri); - } - if (Optional.IsDefined(DeploymentName)) - { - writer.WritePropertyName("deploymentId"u8); - writer.WriteStringValue(DeploymentName); - } - if (Optional.IsDefined(ApiKey)) - { - writer.WritePropertyName("apiKey"u8); - writer.WriteStringValue(ApiKey); - } - if (Optional.IsDefined(AuthenticationIdentity)) - { - if (AuthenticationIdentity != null) - { - writer.WritePropertyName("authIdentity"u8); - writer.WriteObjectValue(AuthenticationIdentity); - } - else - { - writer.WriteNull("authIdentity"); - } - } - if (Optional.IsDefined(ModelName)) - { - writer.WritePropertyName("modelName"u8); - writer.WriteStringValue(ModelName.Value.ToString()); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - 
writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static AzureOpenAIEmbeddingSkill DeserializeAzureOpenAIEmbeddingSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? dimensions = default; - Uri resourceUri = default; - string deploymentId = default; - string apiKey = default; - SearchIndexerDataIdentity authIdentity = default; - AzureOpenAIModelName? modelName = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("dimensions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - dimensions = null; - continue; - } - dimensions = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("resourceUri"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - resourceUri = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("deploymentId"u8)) - { - deploymentId = property.Value.GetString(); - continue; - } - if (property.NameEquals("apiKey"u8)) - { - apiKey = property.Value.GetString(); - continue; - } - if (property.NameEquals("authIdentity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - authIdentity = null; - continue; - } - authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - if (property.NameEquals("modelName"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - modelName = new AzureOpenAIModelName(property.Value.GetString()); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new AzureOpenAIEmbeddingSkill( - odataType, - name, - description, - context, - inputs, - outputs, - dimensions, - resourceUri, - deploymentId, - apiKey, - authIdentity, - modelName); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AzureOpenAIEmbeddingSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureOpenAIEmbeddingSkill(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIEmbeddingSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIEmbeddingSkill.cs deleted file mode 100644 index 0f9de43318f3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIEmbeddingSkill.cs +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Allows you to generate a vector embedding for a given text input using the Azure OpenAI resource. - public partial class AzureOpenAIEmbeddingSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public AzureOpenAIEmbeddingSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models. - /// The resource URI of the Azure OpenAI resource. - /// ID of the Azure OpenAI model deployment on the designated resource. - /// API key of the designated Azure OpenAI resource. - /// - /// The user-assigned managed identity used for outbound connections. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The name of the embedding model that is deployed at the provided deploymentId path. - internal AzureOpenAIEmbeddingSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, int? dimensions, Uri resourceUri, string deploymentName, string apiKey, SearchIndexerDataIdentity authenticationIdentity, AzureOpenAIModelName? 
modelName) : base(oDataType, name, description, context, inputs, outputs) - { - Dimensions = dimensions; - ResourceUri = resourceUri; - DeploymentName = deploymentName; - ApiKey = apiKey; - AuthenticationIdentity = authenticationIdentity; - ModelName = modelName; - ODataType = oDataType ?? "#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill"; - } - - /// The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models. - public int? Dimensions { get; set; } - /// The resource URI of the Azure OpenAI resource. - public Uri ResourceUri { get; set; } - /// ID of the Azure OpenAI model deployment on the designated resource. - public string DeploymentName { get; set; } - /// API key of the designated Azure OpenAI resource. - public string ApiKey { get; set; } - /// - /// The user-assigned managed identity used for outbound connections. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity AuthenticationIdentity { get; set; } - /// The name of the embedding model that is deployed at the provided deploymentId path. - public AzureOpenAIModelName? ModelName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAITokenizerParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAITokenizerParameters.Serialization.cs deleted file mode 100644 index 6d94553c7c80..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAITokenizerParameters.Serialization.cs +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AzureOpenAITokenizerParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(EncoderModelName)) - { - if (EncoderModelName != null) - { - writer.WritePropertyName("encoderModelName"u8); - writer.WriteStringValue(EncoderModelName.Value.ToString()); - } - else - { - writer.WriteNull("encoderModelName"); - } - } - if (Optional.IsCollectionDefined(AllowedSpecialTokens)) - { - writer.WritePropertyName("allowedSpecialTokens"u8); - writer.WriteStartArray(); - foreach (var item in AllowedSpecialTokens) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static AzureOpenAITokenizerParameters DeserializeAzureOpenAITokenizerParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SplitSkillEncoderModelName? 
encoderModelName = default; - IList allowedSpecialTokens = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("encoderModelName"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - encoderModelName = null; - continue; - } - encoderModelName = new SplitSkillEncoderModelName(property.Value.GetString()); - continue; - } - if (property.NameEquals("allowedSpecialTokens"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - allowedSpecialTokens = array; - continue; - } - } - return new AzureOpenAITokenizerParameters(encoderModelName, allowedSpecialTokens ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static AzureOpenAITokenizerParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureOpenAITokenizerParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAITokenizerParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAITokenizerParameters.cs deleted file mode 100644 index 89b1591b8f24..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAITokenizerParameters.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The AzureOpenAITokenizerParameters. - public partial class AzureOpenAITokenizerParameters - { - /// Initializes a new instance of . - public AzureOpenAITokenizerParameters() - { - AllowedSpecialTokens = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// Only applies if the unit is set to azureOpenAITokens. Options include 'R50k_base', 'P50k_base', 'P50k_edit' and 'CL100k_base'. The default value is 'CL100k_base'. - /// (Optional) Only applies if the unit is set to azureOpenAITokens. This parameter defines a collection of special tokens that are permitted within the tokenization process. - internal AzureOpenAITokenizerParameters(SplitSkillEncoderModelName? encoderModelName, IList allowedSpecialTokens) - { - EncoderModelName = encoderModelName; - AllowedSpecialTokens = allowedSpecialTokens; - } - - /// Only applies if the unit is set to azureOpenAITokens. Options include 'R50k_base', 'P50k_base', 'P50k_edit' and 'CL100k_base'. The default value is 'CL100k_base'. - public SplitSkillEncoderModelName? EncoderModelName { get; set; } - /// (Optional) Only applies if the unit is set to azureOpenAITokens. This parameter defines a collection of special tokens that are permitted within the tokenization process. 
- public IList AllowedSpecialTokens { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizer.Serialization.cs deleted file mode 100644 index a8c82dbcad47..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizer.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AzureOpenAIVectorizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Parameters)) - { - writer.WritePropertyName("azureOpenAIParameters"u8); - writer.WriteObjectValue(Parameters); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(VectorizerName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static AzureOpenAIVectorizer DeserializeAzureOpenAIVectorizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - AzureOpenAIVectorizerParameters azureOpenAIParameters = default; - string name = default; - VectorSearchVectorizerKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("azureOpenAIParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - azureOpenAIParameters = AzureOpenAIVectorizerParameters.DeserializeAzureOpenAIVectorizerParameters(property.Value); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchVectorizerKind(property.Value.GetString()); - continue; - } - } - return new AzureOpenAIVectorizer(name, kind, azureOpenAIParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new AzureOpenAIVectorizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureOpenAIVectorizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizer.cs deleted file mode 100644 index 8e7280613ed0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizer.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the Azure OpenAI resource used to vectorize a query string. - public partial class AzureOpenAIVectorizer : VectorSearchVectorizer - { - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// is null. 
- public AzureOpenAIVectorizer(string vectorizerName) : base(vectorizerName) - { - Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); - - Kind = VectorSearchVectorizerKind.AzureOpenAI; - } - - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// The name of the kind of vectorization method being configured for use with vector search. - /// Contains the parameters specific to Azure OpenAI embedding vectorization. - internal AzureOpenAIVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, AzureOpenAIVectorizerParameters parameters) : base(vectorizerName, kind) - { - Parameters = parameters; - Kind = kind; - } - - /// Contains the parameters specific to Azure OpenAI embedding vectorization. - public AzureOpenAIVectorizerParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizerParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizerParameters.Serialization.cs deleted file mode 100644 index fb61f808ea3c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizerParameters.Serialization.cs +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class AzureOpenAIVectorizerParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ResourceUri)) - { - writer.WritePropertyName("resourceUri"u8); - writer.WriteStringValue(ResourceUri.AbsoluteUri); - } - if (Optional.IsDefined(DeploymentName)) - { - writer.WritePropertyName("deploymentId"u8); - writer.WriteStringValue(DeploymentName); - } - if (Optional.IsDefined(ApiKey)) - { - writer.WritePropertyName("apiKey"u8); - writer.WriteStringValue(ApiKey); - } - if (Optional.IsDefined(AuthenticationIdentity)) - { - if (AuthenticationIdentity != null) - { - writer.WritePropertyName("authIdentity"u8); - writer.WriteObjectValue(AuthenticationIdentity); - } - else - { - writer.WriteNull("authIdentity"); - } - } - if (Optional.IsDefined(ModelName)) - { - writer.WritePropertyName("modelName"u8); - writer.WriteStringValue(ModelName.Value.ToString()); - } - writer.WriteEndObject(); - } - - internal static AzureOpenAIVectorizerParameters DeserializeAzureOpenAIVectorizerParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - Uri resourceUri = default; - string deploymentId = default; - string apiKey = default; - SearchIndexerDataIdentity authIdentity = default; - AzureOpenAIModelName? 
modelName = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("resourceUri"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - resourceUri = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("deploymentId"u8)) - { - deploymentId = property.Value.GetString(); - continue; - } - if (property.NameEquals("apiKey"u8)) - { - apiKey = property.Value.GetString(); - continue; - } - if (property.NameEquals("authIdentity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - authIdentity = null; - continue; - } - authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - if (property.NameEquals("modelName"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - modelName = new AzureOpenAIModelName(property.Value.GetString()); - continue; - } - } - return new AzureOpenAIVectorizerParameters(resourceUri, deploymentId, apiKey, authIdentity, modelName); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static AzureOpenAIVectorizerParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeAzureOpenAIVectorizerParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizerParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizerParameters.cs deleted file mode 100644 index 9243c5e40dfc..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/AzureOpenAIVectorizerParameters.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the parameters for connecting to the Azure OpenAI resource. - public partial class AzureOpenAIVectorizerParameters - { - /// Initializes a new instance of . - public AzureOpenAIVectorizerParameters() - { - } - - /// Initializes a new instance of . - /// The resource URI of the Azure OpenAI resource. - /// ID of the Azure OpenAI model deployment on the designated resource. - /// API key of the designated Azure OpenAI resource. - /// - /// The user-assigned managed identity used for outbound connections. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The name of the embedding model that is deployed at the provided deploymentId path. - internal AzureOpenAIVectorizerParameters(Uri resourceUri, string deploymentName, string apiKey, SearchIndexerDataIdentity authenticationIdentity, AzureOpenAIModelName? modelName) - { - ResourceUri = resourceUri; - DeploymentName = deploymentName; - ApiKey = apiKey; - AuthenticationIdentity = authenticationIdentity; - ModelName = modelName; - } - - /// The resource URI of the Azure OpenAI resource. 
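[Illustrative usage sketch, not part of this diff] The vectorizer pair deleted above is typically wired together as sketched below. Constructor and property names are taken from the removed files; the endpoint, deployment name, and environment variable are placeholders.

using System;
using Azure.Search.Documents.Indexes.Models;

var vectorizer = new AzureOpenAIVectorizer("my-openai-vectorizer")
{
    Parameters = new AzureOpenAIVectorizerParameters
    {
        ResourceUri = new Uri("https://example-openai-resource.openai.azure.com"),
        DeploymentName = "text-embedding-deployment",
        // Omit ApiKey to rely on AuthenticationIdentity / managed identity instead.
        ApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY")
    }
};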
- public Uri ResourceUri { get; set; } - /// ID of the Azure OpenAI model deployment on the designated resource. - public string DeploymentName { get; set; } - /// API key of the designated Azure OpenAI resource. - public string ApiKey { get; set; } - /// - /// The user-assigned managed identity used for outbound connections. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity AuthenticationIdentity { get; set; } - /// The name of the embedding model that is deployed at the provided deploymentId path. - public AzureOpenAIModelName? ModelName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BM25Similarity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/BM25Similarity.Serialization.cs deleted file mode 100644 index f1aa6740169c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BM25Similarity.Serialization.cs +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class BM25Similarity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(K1)) - { - if (K1 != null) - { - writer.WritePropertyName("k1"u8); - writer.WriteNumberValue(K1.Value); - } - else - { - writer.WriteNull("k1"); - } - } - if (Optional.IsDefined(B)) - { - if (B != null) - { - writer.WritePropertyName("b"u8); - writer.WriteNumberValue(B.Value); - } - else - { - writer.WriteNull("b"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static BM25Similarity DeserializeBM25Similarity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double? k1 = default; - double? b = default; - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("k1"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - k1 = null; - continue; - } - k1 = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("b"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - b = null; - continue; - } - b = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new BM25Similarity(odataType, k1, b); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new BM25Similarity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeBM25Similarity(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BM25Similarity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/BM25Similarity.cs deleted file mode 100644 index c6e8036edf53..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BM25Similarity.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Ranking function based on the Okapi BM25 similarity algorithm. BM25 is a TF-IDF-like algorithm that includes length normalization (controlled by the 'b' parameter) as well as term frequency saturation (controlled by the 'k1' parameter). - public partial class BM25Similarity : SimilarityAlgorithm - { - /// Initializes a new instance of . - public BM25Similarity() - { - ODataType = "#Microsoft.Azure.Search.BM25Similarity"; - } - - /// Initializes a new instance of . - /// - /// This property controls the scaling function between the term frequency of each matching terms and the final relevance score of a document-query pair. By default, a value of 1.2 is used. A value of 0.0 means the score does not scale with an increase in term frequency. - /// This property controls how the length of a document affects the relevance score. By default, a value of 0.75 is used. A value of 0.0 means no length normalization is applied, while a value of 1.0 means the score is fully normalized by the length of the document. - internal BM25Similarity(string oDataType, double? k1, double? b) : base(oDataType) - { - K1 = k1; - B = b; - ODataType = oDataType ?? "#Microsoft.Azure.Search.BM25Similarity"; - } - - /// This property controls the scaling function between the term frequency of each matching terms and the final relevance score of a document-query pair. By default, a value of 1.2 is used. A value of 0.0 means the score does not scale with an increase in term frequency. - public double? K1 { get; set; } - /// This property controls how the length of a document affects the relevance score. By default, a value of 0.75 is used. A value of 0.0 means no length normalization is applied, while a value of 1.0 means the score is fully normalized by the length of the document. - public double? B { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.Serialization.cs deleted file mode 100644 index 5913c87f85d7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.Serialization.cs +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
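[Illustrative usage sketch, not part of this diff] The BM25 parameters documented in the deleted model above are set as shown below; the values simply restate the documented defaults, and assigning the result to an index's similarity setting is assumed.

using Azure.Search.Documents.Indexes.Models;

var similarity = new BM25Similarity
{
    K1 = 1.2,  // term-frequency saturation; 0.0 means the score does not scale with term frequency
    B = 0.75   // document-length normalization; 0.0 disables it, 1.0 normalizes fully
};
// Assumed usage: assign to a SearchIndex's Similarity before creating or updating the index.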
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class BinaryQuantizationCompression : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(CompressionName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(RerankWithOriginalVectors)) - { - writer.WritePropertyName("rerankWithOriginalVectors"u8); - writer.WriteBooleanValue(RerankWithOriginalVectors.Value); - } - if (Optional.IsDefined(DefaultOversampling)) - { - if (DefaultOversampling != null) - { - writer.WritePropertyName("defaultOversampling"u8); - writer.WriteNumberValue(DefaultOversampling.Value); - } - else - { - writer.WriteNull("defaultOversampling"); - } - } - if (Optional.IsDefined(RescoringOptions)) - { - if (RescoringOptions != null) - { - writer.WritePropertyName("rescoringOptions"u8); - writer.WriteObjectValue(RescoringOptions); - } - else - { - writer.WriteNull("rescoringOptions"); - } - } - if (Optional.IsDefined(TruncationDimension)) - { - if (TruncationDimension != null) - { - writer.WritePropertyName("truncationDimension"u8); - writer.WriteNumberValue(TruncationDimension.Value); - } - else - { - writer.WriteNull("truncationDimension"); - } - } - writer.WriteEndObject(); - } - - internal static BinaryQuantizationCompression DeserializeBinaryQuantizationCompression(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - VectorSearchCompressionKind kind = default; - bool? rerankWithOriginalVectors = default; - double? defaultOversampling = default; - RescoringOptions rescoringOptions = default; - int? truncationDimension = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchCompressionKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("rerankWithOriginalVectors"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - rerankWithOriginalVectors = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("defaultOversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultOversampling = null; - continue; - } - defaultOversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("rescoringOptions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - rescoringOptions = null; - continue; - } - rescoringOptions = RescoringOptions.DeserializeRescoringOptions(property.Value); - continue; - } - if (property.NameEquals("truncationDimension"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - truncationDimension = null; - continue; - } - truncationDimension = property.Value.GetInt32(); - continue; - } - } - return new BinaryQuantizationCompression( - name, - kind, - rerankWithOriginalVectors, - defaultOversampling, - rescoringOptions, - truncationDimension); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static new BinaryQuantizationCompression FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeBinaryQuantizationCompression(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.cs deleted file mode 100644 index fc8caec0912b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains configuration options specific to the binary quantization compression method used during indexing and querying. - public partial class BinaryQuantizationCompression : VectorSearchCompression - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// is null. - public BinaryQuantizationCompression(string compressionName) : base(compressionName) - { - Argument.AssertNotNull(compressionName, nameof(compressionName)); - - Kind = VectorSearchCompressionKind.BinaryQuantization; - } - - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. - /// Contains the options for rescoring. - /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. - internal BinaryQuantizationCompression(string compressionName, VectorSearchCompressionKind kind, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? 
truncationDimension) : base(compressionName, kind, rerankWithOriginalVectors, defaultOversampling, rescoringOptions, truncationDimension) - { - Kind = kind; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerImageAction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerImageAction.cs deleted file mode 100644 index b9a83eae4b1a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerImageAction.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Determines how to process embedded images and image files in Azure blob storage. Setting the "imageAction" configuration to any value other than "none" requires that a skillset also be attached to that indexer. - public readonly partial struct BlobIndexerImageAction : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public BlobIndexerImageAction(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string NoneValue = "none"; - private const string GenerateNormalizedImagesValue = "generateNormalizedImages"; - private const string GenerateNormalizedImagePerPageValue = "generateNormalizedImagePerPage"; - - /// Ignores embedded images or image files in the data set. This is the default. - public static BlobIndexerImageAction None { get; } = new BlobIndexerImageAction(NoneValue); - /// Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field. This action requires that "dataToExtract" is set to "contentAndMetadata". A normalized image refers to additional processing resulting in uniform image output, sized and rotated to promote consistent rendering when you include images in visual search results. This information is generated for each image when you use this option. - public static BlobIndexerImageAction GenerateNormalizedImages { get; } = new BlobIndexerImageAction(GenerateNormalizedImagesValue); - /// Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field, but treats PDF files differently in that each page will be rendered as an image and normalized accordingly, instead of extracting embedded images. Non-PDF file types will be treated the same as if "generateNormalizedImages" was set. - public static BlobIndexerImageAction GenerateNormalizedImagePerPage { get; } = new BlobIndexerImageAction(GenerateNormalizedImagePerPageValue); - /// Determines if two values are the same. - public static bool operator ==(BlobIndexerImageAction left, BlobIndexerImageAction right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(BlobIndexerImageAction left, BlobIndexerImageAction right) => !left.Equals(right); - /// Converts a to a . 
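[Illustrative usage sketch, not part of this diff] The binary quantization compression removed above is configured through the option properties visible in its serializer; those properties are assumed to be settable on the VectorSearchCompression base type, which is outside this diff.

using Azure.Search.Documents.Indexes.Models;

var compression = new BinaryQuantizationCompression("my-binary-quantization")
{
    RerankWithOriginalVectors = true, // rescore candidates using full-precision vectors
    DefaultOversampling = 4.0,        // only meaningful when reranking is enabled
    TruncationDimension = 1024        // only for MRL-trained embeddings; null means no truncation
};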
- public static implicit operator BlobIndexerImageAction(string value) => new BlobIndexerImageAction(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is BlobIndexerImageAction other && Equals(other); - /// - public bool Equals(BlobIndexerImageAction other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerPdfTextRotationAlgorithm.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerPdfTextRotationAlgorithm.cs deleted file mode 100644 index 22cdc3a08f7b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BlobIndexerPdfTextRotationAlgorithm.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Determines algorithm for text extraction from PDF files in Azure blob storage. - public readonly partial struct BlobIndexerPdfTextRotationAlgorithm : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public BlobIndexerPdfTextRotationAlgorithm(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string NoneValue = "none"; - private const string DetectAnglesValue = "detectAngles"; - - /// Leverages normal text extraction. This is the default. - public static BlobIndexerPdfTextRotationAlgorithm None { get; } = new BlobIndexerPdfTextRotationAlgorithm(NoneValue); - /// May produce better and more readable text extraction from PDF files that have rotated text within them. Note that there may be a small performance speed impact when this parameter is used. This parameter only applies to PDF files, and only to PDFs with embedded text. If the rotated text appears within an embedded image in the PDF, this parameter does not apply. - public static BlobIndexerPdfTextRotationAlgorithm DetectAngles { get; } = new BlobIndexerPdfTextRotationAlgorithm(DetectAnglesValue); - /// Determines if two values are the same. - public static bool operator ==(BlobIndexerPdfTextRotationAlgorithm left, BlobIndexerPdfTextRotationAlgorithm right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(BlobIndexerPdfTextRotationAlgorithm left, BlobIndexerPdfTextRotationAlgorithm right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator BlobIndexerPdfTextRotationAlgorithm(string value) => new BlobIndexerPdfTextRotationAlgorithm(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is BlobIndexerPdfTextRotationAlgorithm other && Equals(other); - /// - public bool Equals(BlobIndexerPdfTextRotationAlgorithm other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilter.Serialization.cs deleted file mode 100644 index 8e2616ac76b0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilter.Serialization.cs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CharFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static CharFilter DeserializeCharFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.MappingCharFilter": return MappingCharFilter.DeserializeMappingCharFilter(element); - case "#Microsoft.Azure.Search.PatternReplaceCharFilter": return PatternReplaceCharFilter.DeserializePatternReplaceCharFilter(element); - } - } - return UnknownCharFilter.DeserializeUnknownCharFilter(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static CharFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCharFilter(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilter.cs deleted file mode 100644 index 151e721c598c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CharFilter.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for character filters. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public partial class CharFilter - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of char filter. - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal CharFilter(string oDataType, string name) - { - ODataType = oDataType; - Name = name; - } - - /// A URI fragment specifying the type of char filter. 
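[Illustrative usage sketch, not part of this diff] The two blob indexer enums deleted above are consumed through the indexer's parameter configuration. The IndexingParametersConfiguration type and its property names below are assumptions; they are not shown in this diff.

using Azure.Search.Documents.Indexes.Models;

// Assumed consumer type: IndexingParametersConfiguration (not part of this diff).
var configuration = new IndexingParametersConfiguration
{
    ImageAction = BlobIndexerImageAction.GenerateNormalizedImages,
    PdfTextRotationAlgorithm = BlobIndexerPdfTextRotationAlgorithm.DetectAngles
};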
- internal string ODataType { get; set; } - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - public string Name { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilter.Serialization.cs deleted file mode 100644 index 13637415f7c4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilter.Serialization.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CjkBigramTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(IgnoreScripts)) - { - writer.WritePropertyName("ignoreScripts"u8); - writer.WriteStartArray(); - foreach (var item in IgnoreScripts) - { - writer.WriteStringValue(item.ToSerialString()); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(OutputUnigrams)) - { - writer.WritePropertyName("outputUnigrams"u8); - writer.WriteBooleanValue(OutputUnigrams.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static CjkBigramTokenFilter DeserializeCjkBigramTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList ignoreScripts = default; - bool? outputUnigrams = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("ignoreScripts"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString().ToCjkBigramTokenFilterScripts()); - } - ignoreScripts = array; - continue; - } - if (property.NameEquals("outputUnigrams"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - outputUnigrams = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new CjkBigramTokenFilter(odataType, name, ignoreScripts ?? new ChangeTrackingList(), outputUnigrams); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new CjkBigramTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCjkBigramTokenFilter(document.RootElement); - } - - /// Convert into a . 
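[Illustrative usage sketch, not part of this diff] CharFilter is deleted above as an abstract base with an internal constructor; concrete filters are created through its derived types, whose names appear in the deleted serializer. The MappingCharFilter constructor shape below is an assumption and is not shown in this diff.

using Azure.Search.Documents.Indexes.Models;

// Assumed constructor: MappingCharFilter(string name, IEnumerable<string> mappings).
var charFilter = new MappingCharFilter("my-mapping-filter", new[] { "-=>_", ".=> " });
// The filter is then referenced by name from a custom analyzer's char filter list.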
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilter.cs deleted file mode 100644 index 5e1587823f5f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilter.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Forms bigrams of CJK terms that are generated from the standard tokenizer. This token filter is implemented using Apache Lucene. - public partial class CjkBigramTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public CjkBigramTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - IgnoreScripts = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.CjkBigramTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The scripts to ignore. - /// A value indicating whether to output both unigrams and bigrams (if true), or just bigrams (if false). Default is false. - internal CjkBigramTokenFilter(string oDataType, string name, IList ignoreScripts, bool? outputUnigrams) : base(oDataType, name) - { - IgnoreScripts = ignoreScripts; - OutputUnigrams = outputUnigrams; - ODataType = oDataType ?? "#Microsoft.Azure.Search.CjkBigramTokenFilter"; - } - /// A value indicating whether to output both unigrams and bigrams (if true), or just bigrams (if false). Default is false. - public bool? OutputUnigrams { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilterScripts.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilterScripts.Serialization.cs deleted file mode 100644 index 86ec9db57317..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilterScripts.Serialization.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class CjkBigramTokenFilterScriptsExtensions - { - public static string ToSerialString(this CjkBigramTokenFilterScripts value) => value switch - { - CjkBigramTokenFilterScripts.Han => "han", - CjkBigramTokenFilterScripts.Hiragana => "hiragana", - CjkBigramTokenFilterScripts.Katakana => "katakana", - CjkBigramTokenFilterScripts.Hangul => "hangul", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown CjkBigramTokenFilterScripts value.") - }; - - public static CjkBigramTokenFilterScripts ToCjkBigramTokenFilterScripts(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "han")) return CjkBigramTokenFilterScripts.Han; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hiragana")) return CjkBigramTokenFilterScripts.Hiragana; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "katakana")) return CjkBigramTokenFilterScripts.Katakana; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hangul")) return CjkBigramTokenFilterScripts.Hangul; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown CjkBigramTokenFilterScripts value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilterScripts.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilterScripts.cs deleted file mode 100644 index 214215a923a7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CjkBigramTokenFilterScripts.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Scripts that can be ignored by CjkBigramTokenFilter. - public enum CjkBigramTokenFilterScripts - { - /// Ignore Han script when forming bigrams of CJK terms. - Han, - /// Ignore Hiragana script when forming bigrams of CJK terms. - Hiragana, - /// Ignore Katakana script when forming bigrams of CJK terms. - Katakana, - /// Ignore Hangul script when forming bigrams of CJK terms. - Hangul - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicSimilarity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicSimilarity.Serialization.cs deleted file mode 100644 index ed6b96e3b8b9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicSimilarity.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ClassicSimilarity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static ClassicSimilarity DeserializeClassicSimilarity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new ClassicSimilarity(odataType); - } - - /// Deserializes the model from a raw response. 
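[Illustrative usage sketch, not part of this diff] The CJK bigram token filter and its scripts enum deleted above combine as shown below. IgnoreScripts is assumed to be exposed as a read-only list, matching how the deleted constructor initializes it.

using Azure.Search.Documents.Indexes.Models;

var cjkFilter = new CjkBigramTokenFilter("my-cjk-bigram")
{
    OutputUnigrams = true // emit unigrams in addition to bigrams
};
// Skip Hiragana when forming bigrams.
cjkFilter.IgnoreScripts.Add(CjkBigramTokenFilterScripts.Hiragana);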
- /// The response to deserialize the model from. - internal static new ClassicSimilarity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeClassicSimilarity(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicSimilarity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicSimilarity.cs deleted file mode 100644 index e89c77cc6d70..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicSimilarity.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Legacy similarity algorithm which uses the Lucene TFIDFSimilarity implementation of TF-IDF. This variation of TF-IDF introduces static document length normalization as well as coordinating factors that penalize documents that only partially match the searched queries. - public partial class ClassicSimilarity : SimilarityAlgorithm - { - /// Initializes a new instance of . - public ClassicSimilarity() - { - ODataType = "#Microsoft.Azure.Search.ClassicSimilarity"; - } - - /// Initializes a new instance of . - /// - internal ClassicSimilarity(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "#Microsoft.Azure.Search.ClassicSimilarity"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicTokenizer.Serialization.cs deleted file mode 100644 index 1ac679c96c94..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicTokenizer.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ClassicTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxTokenLength)) - { - writer.WritePropertyName("maxTokenLength"u8); - writer.WriteNumberValue(MaxTokenLength.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static ClassicTokenizer DeserializeClassicTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? 
maxTokenLength = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxTokenLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTokenLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new ClassicTokenizer(odataType, name, maxTokenLength); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new ClassicTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeClassicTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicTokenizer.cs deleted file mode 100644 index 39e3726888a6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ClassicTokenizer.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Grammar-based tokenizer that is suitable for processing most European-language documents. This tokenizer is implemented using Apache Lucene. - public partial class ClassicTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public ClassicTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.ClassicTokenizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. - internal ClassicTokenizer(string oDataType, string name, int? maxTokenLength) : base(oDataType, name) - { - MaxTokenLength = maxTokenLength; - ODataType = oDataType ?? "#Microsoft.Azure.Search.ClassicTokenizer"; - } - - /// The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. - public int? 
MaxTokenLength { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccount.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccount.Serialization.cs deleted file mode 100644 index 724e47e6e30a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccount.Serialization.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CognitiveServicesAccount : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WriteEndObject(); - } - - internal static CognitiveServicesAccount DeserializeCognitiveServicesAccount(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.AIServicesByIdentity": return AIServicesAccountIdentity.DeserializeAIServicesAccountIdentity(element); - case "#Microsoft.Azure.Search.AIServicesByKey": return AIServicesAccountKey.DeserializeAIServicesAccountKey(element); - case "#Microsoft.Azure.Search.CognitiveServicesByKey": return CognitiveServicesAccountKey.DeserializeCognitiveServicesAccountKey(element); - case "#Microsoft.Azure.Search.DefaultCognitiveServices": return DefaultCognitiveServicesAccount.DeserializeDefaultCognitiveServicesAccount(element); - } - } - return UnknownCognitiveServicesAccount.DeserializeUnknownCognitiveServicesAccount(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static CognitiveServicesAccount FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCognitiveServicesAccount(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccount.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccount.cs deleted file mode 100644 index d9a85ae2aab0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccount.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for describing any Azure AI service resource attached to a skillset. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . 
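[Illustrative usage sketch, not part of this diff] The classic tokenizer and similarity models deleted above are used as shown below; the values restate the documented default and limit.

using Azure.Search.Documents.Indexes.Models;

var tokenizer = new ClassicTokenizer("my-classic-tokenizer")
{
    MaxTokenLength = 255 // default; values up to 300 are accepted, longer tokens are split
};
var similarity = new ClassicSimilarity(); // legacy Lucene TF-IDF ranking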
- /// - public partial class CognitiveServicesAccount - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of Azure AI service resource attached to a skillset. - /// Description of the Azure AI service resource attached to a skillset. - internal CognitiveServicesAccount(string oDataType, string description) - { - ODataType = oDataType; - Description = description; - } - - /// A URI fragment specifying the type of Azure AI service resource attached to a skillset. - internal string ODataType { get; set; } - /// Description of the Azure AI service resource attached to a skillset. - public string Description { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccountKey.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccountKey.Serialization.cs deleted file mode 100644 index c9b11e769dbf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccountKey.Serialization.cs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CognitiveServicesAccountKey : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("key"u8); - writer.WriteStringValue(Key); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WriteEndObject(); - } - - internal static CognitiveServicesAccountKey DeserializeCognitiveServicesAccountKey(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string key = default; - string odataType = default; - string description = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("key"u8)) - { - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - } - return new CognitiveServicesAccountKey(odataType, description, key); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new CognitiveServicesAccountKey FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCognitiveServicesAccountKey(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccountKey.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccountKey.cs deleted file mode 100644 index 08f1069d2bc5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CognitiveServicesAccountKey.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
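[Illustrative usage sketch, not part of this diff] Attaching an Azure AI services account by key uses the CognitiveServicesAccountKey model deleted here; attaching it to a skillset is assumed, and the environment variable is a placeholder.

using System;
using Azure.Search.Documents.Indexes.Models;

var aiServicesAccount = new CognitiveServicesAccountKey(
    Environment.GetEnvironmentVariable("AZURE_AI_SERVICES_KEY"))
{
    Description = "Multi-region Azure AI services key used by the skillset"
};
// Assumed usage: assign to SearchIndexerSkillset.CognitiveServicesAccount.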
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The multi-region account key of an Azure AI service resource that's attached to a skillset. - public partial class CognitiveServicesAccountKey : CognitiveServicesAccount - { - /// Initializes a new instance of . - /// The key used to provision the Azure AI service resource attached to a skillset. - /// is null. - public CognitiveServicesAccountKey(string key) - { - Argument.AssertNotNull(key, nameof(key)); - - Key = key; - ODataType = "#Microsoft.Azure.Search.CognitiveServicesByKey"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of Azure AI service resource attached to a skillset. - /// Description of the Azure AI service resource attached to a skillset. - /// The key used to provision the Azure AI service resource attached to a skillset. - internal CognitiveServicesAccountKey(string oDataType, string description, string key) : base(oDataType, description) - { - Key = key; - ODataType = oDataType ?? "#Microsoft.Azure.Search.CognitiveServicesByKey"; - } - - /// The key used to provision the Azure AI service resource attached to a skillset. - public string Key { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CommonGramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CommonGramTokenFilter.Serialization.cs deleted file mode 100644 index 2805232d5b02..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CommonGramTokenFilter.Serialization.cs +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CommonGramTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("commonWords"u8); - writer.WriteStartArray(); - foreach (var item in CommonWords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(IgnoreCase)) - { - writer.WritePropertyName("ignoreCase"u8); - writer.WriteBooleanValue(IgnoreCase.Value); - } - if (Optional.IsDefined(UseQueryMode)) - { - writer.WritePropertyName("queryMode"u8); - writer.WriteBooleanValue(UseQueryMode.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static CommonGramTokenFilter DeserializeCommonGramTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList commonWords = default; - bool? ignoreCase = default; - bool? 
queryMode = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("commonWords"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - commonWords = array; - continue; - } - if (property.NameEquals("ignoreCase"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - ignoreCase = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("queryMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - queryMode = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new CommonGramTokenFilter(odataType, name, commonWords, ignoreCase, queryMode); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new CommonGramTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCommonGramTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CommonGramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CommonGramTokenFilter.cs deleted file mode 100644 index eec757558ebd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CommonGramTokenFilter.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. This token filter is implemented using Apache Lucene. - public partial class CommonGramTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The set of common words. - /// or is null. - public CommonGramTokenFilter(string name, IEnumerable commonWords) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(commonWords, nameof(commonWords)); - - CommonWords = commonWords.ToList(); - ODataType = "#Microsoft.Azure.Search.CommonGramTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The set of common words. - /// A value indicating whether common words matching will be case insensitive. Default is false. - /// A value that indicates whether the token filter is in query mode. 
When in query mode, the token filter generates bigrams and then removes common words and single terms followed by a common word. Default is false. - internal CommonGramTokenFilter(string oDataType, string name, IList commonWords, bool? ignoreCase, bool? useQueryMode) : base(oDataType, name) - { - CommonWords = commonWords; - IgnoreCase = ignoreCase; - UseQueryMode = useQueryMode; - ODataType = oDataType ?? "#Microsoft.Azure.Search.CommonGramTokenFilter"; - } - /// A value indicating whether common words matching will be case insensitive. Default is false. - public bool? IgnoreCase { get; set; } - /// A value that indicates whether the token filter is in query mode. When in query mode, the token filter generates bigrams and then removes common words and single terms followed by a common word. Default is false. - public bool? UseQueryMode { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ConditionalSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ConditionalSkill.Serialization.cs deleted file mode 100644 index 68a8194e6bb0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ConditionalSkill.Serialization.cs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ConditionalSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static ConditionalSkill DeserializeConditionalSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if 
(property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new ConditionalSkill( - odataType, - name, - description, - context, - inputs, - outputs); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new ConditionalSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeConditionalSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ConditionalSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ConditionalSkill.cs deleted file mode 100644 index fc95b4fddb38..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ConditionalSkill.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that enables scenarios that require a Boolean operation to determine the data to assign to an output. - public partial class ConditionalSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public ConditionalSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Util.ConditionalSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - internal ConditionalSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs) : base(oDataType, name, description, context, inputs, outputs) - { - ODataType = oDataType ?? 
"#Microsoft.Skills.Util.ConditionalSkill"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CorsOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CorsOptions.Serialization.cs deleted file mode 100644 index 8e0912316e23..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CorsOptions.Serialization.cs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CorsOptions : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("allowedOrigins"u8); - writer.WriteStartArray(); - foreach (var item in AllowedOrigins) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(MaxAgeInSeconds)) - { - if (MaxAgeInSeconds != null) - { - writer.WritePropertyName("maxAgeInSeconds"u8); - writer.WriteNumberValue(MaxAgeInSeconds.Value); - } - else - { - writer.WriteNull("maxAgeInSeconds"); - } - } - writer.WriteEndObject(); - } - - internal static CorsOptions DeserializeCorsOptions(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList allowedOrigins = default; - long? maxAgeInSeconds = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("allowedOrigins"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - allowedOrigins = array; - continue; - } - if (property.NameEquals("maxAgeInSeconds"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxAgeInSeconds = null; - continue; - } - maxAgeInSeconds = property.Value.GetInt64(); - continue; - } - } - return new CorsOptions(allowedOrigins, maxAgeInSeconds); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static CorsOptions FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCorsOptions(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CorsOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CorsOptions.cs deleted file mode 100644 index a402fabc5fce..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CorsOptions.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines options to control Cross-Origin Resource Sharing (CORS) for an index. - public partial class CorsOptions - { - /// Initializes a new instance of . - /// The list of origins from which JavaScript code will be granted access to your index. 
Can contain a list of hosts of the form {protocol}://{fully-qualified-domain-name}[:{port#}], or a single '*' to allow all origins (not recommended). - /// is null. - public CorsOptions(IEnumerable allowedOrigins) - { - Argument.AssertNotNull(allowedOrigins, nameof(allowedOrigins)); - - AllowedOrigins = allowedOrigins.ToList(); - } - - /// Initializes a new instance of . - /// The list of origins from which JavaScript code will be granted access to your index. Can contain a list of hosts of the form {protocol}://{fully-qualified-domain-name}[:{port#}], or a single '*' to allow all origins (not recommended). - /// The duration for which browsers should cache CORS preflight responses. Defaults to 5 minutes. - internal CorsOptions(IList allowedOrigins, long? maxAgeInSeconds) - { - AllowedOrigins = allowedOrigins; - MaxAgeInSeconds = maxAgeInSeconds; - } - /// The duration for which browsers should cache CORS preflight responses. Defaults to 5 minutes. - public long? MaxAgeInSeconds { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomAnalyzer.Serialization.cs deleted file mode 100644 index 705a8ef44dd7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomAnalyzer.Serialization.cs +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CustomAnalyzer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("tokenizer"u8); - writer.WriteStringValue(TokenizerName.ToString()); - if (Optional.IsCollectionDefined(TokenFilters)) - { - writer.WritePropertyName("tokenFilters"u8); - writer.WriteStartArray(); - foreach (var item in TokenFilters) - { - writer.WriteStringValue(item.ToString()); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(CharFilters)) - { - writer.WritePropertyName("charFilters"u8); - writer.WriteStartArray(); - foreach (var item in CharFilters) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static CustomAnalyzer DeserializeCustomAnalyzer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - LexicalTokenizerName tokenizer = default; - IList tokenFilters = default; - IList charFilters = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("tokenizer"u8)) - { - tokenizer = new LexicalTokenizerName(property.Value.GetString()); - continue; - } - if (property.NameEquals("tokenFilters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(new TokenFilterName(item.GetString())); - } - tokenFilters = array; - continue; - } - if (property.NameEquals("charFilters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in 
property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - charFilters = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new CustomAnalyzer(odataType, name, tokenizer, tokenFilters ?? new ChangeTrackingList(), charFilters ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new CustomAnalyzer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCustomAnalyzer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomAnalyzer.cs deleted file mode 100644 index 5ee2930397a1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomAnalyzer.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Allows you to take control over the process of converting text into indexable/searchable tokens. It's a user-defined configuration consisting of a single predefined tokenizer and one or more filters. The tokenizer is responsible for breaking text into tokens, and the filters for modifying tokens emitted by the tokenizer. - public partial class CustomAnalyzer : LexicalAnalyzer - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of analyzer. - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. - /// A list of token filters used to filter out or modify the tokens generated by a tokenizer. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. - /// A list of character filters used to prepare input text before it is processed by the tokenizer. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. - internal CustomAnalyzer(string oDataType, string name, LexicalTokenizerName tokenizerName, IList tokenFilters, IList charFilters) : base(oDataType, name) - { - TokenizerName = tokenizerName; - TokenFilters = tokenFilters; - CharFilters = charFilters; - ODataType = oDataType ?? 
"#Microsoft.Azure.Search.CustomAnalyzer"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntity.Serialization.cs deleted file mode 100644 index 8a236bbcb5e2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntity.Serialization.cs +++ /dev/null @@ -1,333 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CustomEntity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(Description)) - { - if (Description != null) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - else - { - writer.WriteNull("description"); - } - } - if (Optional.IsDefined(Type)) - { - if (Type != null) - { - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - } - else - { - writer.WriteNull("type"); - } - } - if (Optional.IsDefined(Subtype)) - { - if (Subtype != null) - { - writer.WritePropertyName("subtype"u8); - writer.WriteStringValue(Subtype); - } - else - { - writer.WriteNull("subtype"); - } - } - if (Optional.IsDefined(Id)) - { - if (Id != null) - { - writer.WritePropertyName("id"u8); - writer.WriteStringValue(Id); - } - else - { - writer.WriteNull("id"); - } - } - if (Optional.IsDefined(CaseSensitive)) - { - if (CaseSensitive != null) - { - writer.WritePropertyName("caseSensitive"u8); - writer.WriteBooleanValue(CaseSensitive.Value); - } - else - { - writer.WriteNull("caseSensitive"); - } - } - if (Optional.IsDefined(AccentSensitive)) - { - if (AccentSensitive != null) - { - writer.WritePropertyName("accentSensitive"u8); - writer.WriteBooleanValue(AccentSensitive.Value); - } - else - { - writer.WriteNull("accentSensitive"); - } - } - if (Optional.IsDefined(FuzzyEditDistance)) - { - if (FuzzyEditDistance != null) - { - writer.WritePropertyName("fuzzyEditDistance"u8); - writer.WriteNumberValue(FuzzyEditDistance.Value); - } - else - { - writer.WriteNull("fuzzyEditDistance"); - } - } - if (Optional.IsDefined(DefaultCaseSensitive)) - { - if (DefaultCaseSensitive != null) - { - writer.WritePropertyName("defaultCaseSensitive"u8); - writer.WriteBooleanValue(DefaultCaseSensitive.Value); - } - else - { - writer.WriteNull("defaultCaseSensitive"); - } - } - if (Optional.IsDefined(DefaultAccentSensitive)) - { - if (DefaultAccentSensitive != null) - { - writer.WritePropertyName("defaultAccentSensitive"u8); - writer.WriteBooleanValue(DefaultAccentSensitive.Value); - } - else - { - writer.WriteNull("defaultAccentSensitive"); - } - } - if (Optional.IsDefined(DefaultFuzzyEditDistance)) - { - if (DefaultFuzzyEditDistance != null) - { - writer.WritePropertyName("defaultFuzzyEditDistance"u8); - writer.WriteNumberValue(DefaultFuzzyEditDistance.Value); - } - else - { - writer.WriteNull("defaultFuzzyEditDistance"); - } - } - if (Optional.IsCollectionDefined(Aliases)) - { - if (Aliases != null) - { - writer.WritePropertyName("aliases"u8); - writer.WriteStartArray(); - foreach (var item in Aliases) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - else - { - writer.WriteNull("aliases"); - } - } - 
writer.WriteEndObject(); - } - - internal static CustomEntity DeserializeCustomEntity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string description = default; - string type = default; - string subtype = default; - string id = default; - bool? caseSensitive = default; - bool? accentSensitive = default; - int? fuzzyEditDistance = default; - bool? defaultCaseSensitive = default; - bool? defaultAccentSensitive = default; - int? defaultFuzzyEditDistance = default; - IList aliases = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - description = null; - continue; - } - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("type"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - type = null; - continue; - } - type = property.Value.GetString(); - continue; - } - if (property.NameEquals("subtype"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - subtype = null; - continue; - } - subtype = property.Value.GetString(); - continue; - } - if (property.NameEquals("id"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - id = null; - continue; - } - id = property.Value.GetString(); - continue; - } - if (property.NameEquals("caseSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - caseSensitive = null; - continue; - } - caseSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("accentSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - accentSensitive = null; - continue; - } - accentSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("fuzzyEditDistance"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - fuzzyEditDistance = null; - continue; - } - fuzzyEditDistance = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("defaultCaseSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultCaseSensitive = null; - continue; - } - defaultCaseSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("defaultAccentSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultAccentSensitive = null; - continue; - } - defaultAccentSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("defaultFuzzyEditDistance"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultFuzzyEditDistance = null; - continue; - } - defaultFuzzyEditDistance = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("aliases"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - aliases = null; - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(CustomEntityAlias.DeserializeCustomEntityAlias(item)); - } - aliases = array; - continue; - } - } - return new CustomEntity( - name, - description, - type, - subtype, - id, - caseSensitive, - accentSensitive, - fuzzyEditDistance, - defaultCaseSensitive, - defaultAccentSensitive, - defaultFuzzyEditDistance, - aliases ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
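The CustomAnalyzer whose definition was removed just above combines one tokenizer with ordered token and character filters. A minimal sketch, assuming the hand-authored public constructor CustomAnalyzer(string name, LexicalTokenizerName tokenizerName) that accompanies the internal constructor shown in the deleted code (analyzer and index names are illustrative):

    var index = new SearchIndex("hotels");
    index.Analyzers.Add(new CustomAnalyzer("folded-standard", LexicalTokenizerName.Standard)
    {
        // Filters run in the order listed: lowercase first, then strip diacritics.
        TokenFilters = { TokenFilterName.Lowercase, TokenFilterName.AsciiFolding }
    });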
- internal static CustomEntity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCustomEntity(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntity.cs deleted file mode 100644 index b9b0c03bb6ee..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntity.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// An object that contains information about the matches that were found, and related metadata. - public partial class CustomEntity - { - /// Initializes a new instance of . - /// The top-level entity descriptor. Matches in the skill output will be grouped by this name, and it should represent the "normalized" form of the text being found. - /// is null. - public CustomEntity(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - Aliases = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The top-level entity descriptor. Matches in the skill output will be grouped by this name, and it should represent the "normalized" form of the text being found. - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - /// Defaults to false. Boolean value denoting whether comparisons with the entity name should be sensitive to character casing. Sample case insensitive matches of "Microsoft" could be: microsoft, microSoft, MICROSOFT. - /// Defaults to false. Boolean value denoting whether comparisons with the entity name should be sensitive to accent. - /// Defaults to 0. Maximum value of 5. Denotes the acceptable number of divergent characters that would still constitute a match with the entity name. The smallest possible fuzziness for any given match is returned. For instance, if the edit distance is set to 3, "Windows10" would still match "Windows", "Windows10" and "Windows 7". When case sensitivity is set to false, case differences do NOT count towards fuzziness tolerance, but otherwise do. - /// Changes the default case sensitivity value for this entity. It be used to change the default value of all aliases caseSensitive values. - /// Changes the default accent sensitivity value for this entity. It be used to change the default value of all aliases accentSensitive values. 
- /// Changes the default fuzzy edit distance value for this entity. It can be used to change the default value of all aliases fuzzyEditDistance values. - /// An array of complex objects that can be used to specify alternative spellings or synonyms to the root entity name. - internal CustomEntity(string name, string description, string type, string subtype, string id, bool? caseSensitive, bool? accentSensitive, int? fuzzyEditDistance, bool? defaultCaseSensitive, bool? defaultAccentSensitive, int? defaultFuzzyEditDistance, IList aliases) - { - Name = name; - Description = description; - Type = type; - Subtype = subtype; - Id = id; - CaseSensitive = caseSensitive; - AccentSensitive = accentSensitive; - FuzzyEditDistance = fuzzyEditDistance; - DefaultCaseSensitive = defaultCaseSensitive; - DefaultAccentSensitive = defaultAccentSensitive; - DefaultFuzzyEditDistance = defaultFuzzyEditDistance; - Aliases = aliases; - } - - /// The top-level entity descriptor. Matches in the skill output will be grouped by this name, and it should represent the "normalized" form of the text being found. - public string Name { get; set; } - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - public string Description { get; set; } - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - public string Type { get; set; } - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - public string Subtype { get; set; } - /// This field can be used as a passthrough for custom metadata about the matched text(s). The value of this field will appear with every match of its entity in the skill output. - public string Id { get; set; } - /// Defaults to false. Boolean value denoting whether comparisons with the entity name should be sensitive to character casing. Sample case insensitive matches of "Microsoft" could be: microsoft, microSoft, MICROSOFT. - public bool? CaseSensitive { get; set; } - /// Defaults to false. Boolean value denoting whether comparisons with the entity name should be sensitive to accent. - public bool? AccentSensitive { get; set; } - /// Defaults to 0. Maximum value of 5. Denotes the acceptable number of divergent characters that would still constitute a match with the entity name. The smallest possible fuzziness for any given match is returned. For instance, if the edit distance is set to 3, "Windows10" would still match "Windows", "Windows10" and "Windows 7". When case sensitivity is set to false, case differences do NOT count towards fuzziness tolerance, but otherwise do. - public int? FuzzyEditDistance { get; set; } - /// Changes the default case sensitivity value for this entity. It be used to change the default value of all aliases caseSensitive values. - public bool? DefaultCaseSensitive { get; set; } - /// Changes the default accent sensitivity value for this entity. It be used to change the default value of all aliases accentSensitive values. - public bool? DefaultAccentSensitive { get; set; } - /// Changes the default fuzzy edit distance value for this entity. It can be used to change the default value of all aliases fuzzyEditDistance values. - public int? 
DefaultFuzzyEditDistance { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityAlias.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityAlias.Serialization.cs deleted file mode 100644 index 598308aeda0a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityAlias.Serialization.cs +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CustomEntityAlias : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("text"u8); - writer.WriteStringValue(Text); - if (Optional.IsDefined(CaseSensitive)) - { - if (CaseSensitive != null) - { - writer.WritePropertyName("caseSensitive"u8); - writer.WriteBooleanValue(CaseSensitive.Value); - } - else - { - writer.WriteNull("caseSensitive"); - } - } - if (Optional.IsDefined(AccentSensitive)) - { - if (AccentSensitive != null) - { - writer.WritePropertyName("accentSensitive"u8); - writer.WriteBooleanValue(AccentSensitive.Value); - } - else - { - writer.WriteNull("accentSensitive"); - } - } - if (Optional.IsDefined(FuzzyEditDistance)) - { - if (FuzzyEditDistance != null) - { - writer.WritePropertyName("fuzzyEditDistance"u8); - writer.WriteNumberValue(FuzzyEditDistance.Value); - } - else - { - writer.WriteNull("fuzzyEditDistance"); - } - } - writer.WriteEndObject(); - } - - internal static CustomEntityAlias DeserializeCustomEntityAlias(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string text = default; - bool? caseSensitive = default; - bool? accentSensitive = default; - int? fuzzyEditDistance = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("text"u8)) - { - text = property.Value.GetString(); - continue; - } - if (property.NameEquals("caseSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - caseSensitive = null; - continue; - } - caseSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("accentSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - accentSensitive = null; - continue; - } - accentSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("fuzzyEditDistance"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - fuzzyEditDistance = null; - continue; - } - fuzzyEditDistance = property.Value.GetInt32(); - continue; - } - } - return new CustomEntityAlias(text, caseSensitive, accentSensitive, fuzzyEditDistance); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static CustomEntityAlias FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCustomEntityAlias(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityAlias.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityAlias.cs deleted file mode 100644 index d3d232546f11..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityAlias.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A complex object that can be used to specify alternative spellings or synonyms to the root entity name. - public partial class CustomEntityAlias - { - /// Initializes a new instance of . - /// The text of the alias. - /// is null. - public CustomEntityAlias(string text) - { - Argument.AssertNotNull(text, nameof(text)); - - Text = text; - } - - /// Initializes a new instance of . - /// The text of the alias. - /// Determine if the alias is case sensitive. - /// Determine if the alias is accent sensitive. - /// Determine the fuzzy edit distance of the alias. - internal CustomEntityAlias(string text, bool? caseSensitive, bool? accentSensitive, int? fuzzyEditDistance) - { - Text = text; - CaseSensitive = caseSensitive; - AccentSensitive = accentSensitive; - FuzzyEditDistance = fuzzyEditDistance; - } - - /// The text of the alias. - public string Text { get; set; } - /// Determine if the alias is case sensitive. - public bool? CaseSensitive { get; set; } - /// Determine if the alias is accent sensitive. - public bool? AccentSensitive { get; set; } - /// Determine the fuzzy edit distance of the alias. - public int? FuzzyEditDistance { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkill.Serialization.cs deleted file mode 100644 index b5ffdca4093b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkill.Serialization.cs +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
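Taken together, the CustomEntity and CustomEntityAlias types deleted above describe the dictionary format used for custom entity lookup: a normalized entity name, optional matching flags, and alternative spellings. A brief sketch using the public constructors shown in the removed code (all values are illustrative):

    var entity = new CustomEntity("Microsoft")
    {
        Description = "Technology company",
        CaseSensitive = false,   // "microsoft" and "MICROSOFT" also match
        FuzzyEditDistance = 1    // tolerate one divergent character
    };
    entity.Aliases.Add(new CustomEntityAlias("MSFT") { CaseSensitive = true });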
- -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CustomEntityLookupSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(EntitiesDefinitionUri)) - { - if (EntitiesDefinitionUri != null) - { - writer.WritePropertyName("entitiesDefinitionUri"u8); - writer.WriteStringValue(EntitiesDefinitionUri.AbsoluteUri); - } - else - { - writer.WriteNull("entitiesDefinitionUri"); - } - } - if (Optional.IsCollectionDefined(InlineEntitiesDefinition)) - { - if (InlineEntitiesDefinition != null) - { - writer.WritePropertyName("inlineEntitiesDefinition"u8); - writer.WriteStartArray(); - foreach (var item in InlineEntitiesDefinition) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - else - { - writer.WriteNull("inlineEntitiesDefinition"); - } - } - if (Optional.IsDefined(GlobalDefaultCaseSensitive)) - { - if (GlobalDefaultCaseSensitive != null) - { - writer.WritePropertyName("globalDefaultCaseSensitive"u8); - writer.WriteBooleanValue(GlobalDefaultCaseSensitive.Value); - } - else - { - writer.WriteNull("globalDefaultCaseSensitive"); - } - } - if (Optional.IsDefined(GlobalDefaultAccentSensitive)) - { - if (GlobalDefaultAccentSensitive != null) - { - writer.WritePropertyName("globalDefaultAccentSensitive"u8); - writer.WriteBooleanValue(GlobalDefaultAccentSensitive.Value); - } - else - { - writer.WriteNull("globalDefaultAccentSensitive"); - } - } - if (Optional.IsDefined(GlobalDefaultFuzzyEditDistance)) - { - if (GlobalDefaultFuzzyEditDistance != null) - { - writer.WritePropertyName("globalDefaultFuzzyEditDistance"u8); - writer.WriteNumberValue(GlobalDefaultFuzzyEditDistance.Value); - } - else - { - writer.WriteNull("globalDefaultFuzzyEditDistance"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static CustomEntityLookupSkill DeserializeCustomEntityLookupSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - CustomEntityLookupSkillLanguage? defaultLanguageCode = default; - Uri entitiesDefinitionUri = default; - IList inlineEntitiesDefinition = default; - bool? globalDefaultCaseSensitive = default; - bool? globalDefaultAccentSensitive = default; - int? 
globalDefaultFuzzyEditDistance = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = new CustomEntityLookupSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("entitiesDefinitionUri"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - entitiesDefinitionUri = null; - continue; - } - entitiesDefinitionUri = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("inlineEntitiesDefinition"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - inlineEntitiesDefinition = null; - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(CustomEntity.DeserializeCustomEntity(item)); - } - inlineEntitiesDefinition = array; - continue; - } - if (property.NameEquals("globalDefaultCaseSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - globalDefaultCaseSensitive = null; - continue; - } - globalDefaultCaseSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("globalDefaultAccentSensitive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - globalDefaultAccentSensitive = null; - continue; - } - globalDefaultAccentSensitive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("globalDefaultFuzzyEditDistance"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - globalDefaultFuzzyEditDistance = null; - continue; - } - globalDefaultFuzzyEditDistance = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new CustomEntityLookupSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - entitiesDefinitionUri, - inlineEntitiesDefinition ?? new ChangeTrackingList(), - globalDefaultCaseSensitive, - globalDefaultAccentSensitive, - globalDefaultFuzzyEditDistance); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new CustomEntityLookupSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCustomEntityLookupSkill(document.RootElement); - } - - /// Convert into a . 
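The CustomEntityLookupSkill being removed here accepts either an entitiesDefinitionUri (a JSON or CSV file reachable over HTTPS) or an inline list of CustomEntity definitions, plus global default matching flags. A sketch of constructing the skill with inline entities and placing it in a skillset, assuming the public (inputs, outputs) constructor from the deleted model and illustrative names:

    var lookupSkill = new CustomEntityLookupSkill(
        inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
        outputs: new[] { new OutputFieldMappingEntry("entities") { TargetName = "foundEntities" } })
    {
        GlobalDefaultCaseSensitive = false
    };
    lookupSkill.InlineEntitiesDefinition.Add(new CustomEntity("Microsoft"));
    var skillset = new SearchIndexerSkillset("products-skillset", new[] { lookupSkill });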
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkill.cs deleted file mode 100644 index 75d2f3d1463d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomEntityLookupSkill.cs +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill looks for text from a custom, user-defined list of words and phrases. - public partial class CustomEntityLookupSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public CustomEntityLookupSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - InlineEntitiesDefinition = new ChangeTrackingList(); - ODataType = "#Microsoft.Skills.Text.CustomEntityLookupSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// Path to a JSON or CSV file containing all the target text to match against. This entity definition is read at the beginning of an indexer run. Any updates to this file during an indexer run will not take effect until subsequent runs. This config must be accessible over HTTPS. - /// The inline CustomEntity definition. - /// A global flag for CaseSensitive. If CaseSensitive is not set in CustomEntity, this value will be the default value. - /// A global flag for AccentSensitive. If AccentSensitive is not set in CustomEntity, this value will be the default value. - /// A global flag for FuzzyEditDistance. If FuzzyEditDistance is not set in CustomEntity, this value will be the default value. - internal CustomEntityLookupSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, CustomEntityLookupSkillLanguage? defaultLanguageCode, Uri entitiesDefinitionUri, IList inlineEntitiesDefinition, bool? globalDefaultCaseSensitive, bool? globalDefaultAccentSensitive, int? 
globalDefaultFuzzyEditDistance) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - EntitiesDefinitionUri = entitiesDefinitionUri; - InlineEntitiesDefinition = inlineEntitiesDefinition; - GlobalDefaultCaseSensitive = globalDefaultCaseSensitive; - GlobalDefaultAccentSensitive = globalDefaultAccentSensitive; - GlobalDefaultFuzzyEditDistance = globalDefaultFuzzyEditDistance; - ODataType = oDataType ?? "#Microsoft.Skills.Text.CustomEntityLookupSkill"; - } - - /// A value indicating which language code to use. Default is `en`. - public CustomEntityLookupSkillLanguage? DefaultLanguageCode { get; set; } - /// A global flag for CaseSensitive. If CaseSensitive is not set in CustomEntity, this value will be the default value. - public bool? GlobalDefaultCaseSensitive { get; set; } - /// A global flag for AccentSensitive. If AccentSensitive is not set in CustomEntity, this value will be the default value. - public bool? GlobalDefaultAccentSensitive { get; set; } - /// A global flag for FuzzyEditDistance. If FuzzyEditDistance is not set in CustomEntity, this value will be the default value. - public int? GlobalDefaultFuzzyEditDistance { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomNormalizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomNormalizer.Serialization.cs deleted file mode 100644 index 61b820f527d6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomNormalizer.Serialization.cs +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class CustomNormalizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(TokenFilters)) - { - writer.WritePropertyName("tokenFilters"u8); - writer.WriteStartArray(); - foreach (var item in TokenFilters) - { - writer.WriteStringValue(item.ToString()); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(CharFilters)) - { - writer.WritePropertyName("charFilters"u8); - writer.WriteStartArray(); - foreach (var item in CharFilters) - { - writer.WriteStringValue(item.ToString()); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static CustomNormalizer DeserializeCustomNormalizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList tokenFilters = default; - IList charFilters = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("tokenFilters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(new TokenFilterName(item.GetString())); - } - tokenFilters = array; - continue; - } - if (property.NameEquals("charFilters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - 
array.Add(new CharFilterName(item.GetString())); - } - charFilters = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new CustomNormalizer(odataType, name, tokenFilters ?? new ChangeTrackingList(), charFilters ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new CustomNormalizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeCustomNormalizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomNormalizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomNormalizer.cs deleted file mode 100644 index df041df222cc..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/CustomNormalizer.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Allows you to configure normalization for filterable, sortable, and facetable fields, which by default operate with strict matching. This is a user-defined configuration consisting of at least one or more filters, which modify the token that is stored. - public partial class CustomNormalizer : LexicalNormalizer - { - /// Initializes a new instance of . - /// The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. It cannot end in '.microsoft' nor '.lucene', nor be named 'asciifolding', 'standard', 'lowercase', 'uppercase', or 'elision'. - /// is null. - public CustomNormalizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - TokenFilters = new ChangeTrackingList(); - CharFilters = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.CustomNormalizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of normalizer. - /// The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. It cannot end in '.microsoft' nor '.lucene', nor be named 'asciifolding', 'standard', 'lowercase', 'uppercase', or 'elision'. - /// A list of token filters used to filter out or modify the input token. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. - /// A list of character filters used to prepare input text before it is processed. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. - internal CustomNormalizer(string oDataType, string name, IList tokenFilters, IList charFilters) : base(oDataType, name) - { - TokenFilters = tokenFilters; - CharFilters = charFilters; - ODataType = oDataType ?? 
"#Microsoft.Azure.Search.CustomNormalizer"; - } - - /// A list of token filters used to filter out or modify the input token. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. - public IList TokenFilters { get; } - /// A list of character filters used to prepare input text before it is processed. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. - public IList CharFilters { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataChangeDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DataChangeDetectionPolicy.Serialization.cs deleted file mode 100644 index fcac3e6aab57..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataChangeDetectionPolicy.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DataChangeDetectionPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static DataChangeDetectionPolicy DeserializeDataChangeDetectionPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy": return HighWaterMarkChangeDetectionPolicy.DeserializeHighWaterMarkChangeDetectionPolicy(element); - case "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy": return SqlIntegratedChangeTrackingPolicy.DeserializeSqlIntegratedChangeTrackingPolicy(element); - } - } - return UnknownDataChangeDetectionPolicy.DeserializeUnknownDataChangeDetectionPolicy(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static DataChangeDetectionPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDataChangeDetectionPolicy(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataChangeDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DataChangeDetectionPolicy.cs deleted file mode 100644 index 553ef79394b1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataChangeDetectionPolicy.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for data change detection policies. - /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public partial class DataChangeDetectionPolicy - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of data change detection policy. - internal DataChangeDetectionPolicy(string oDataType) - { - ODataType = oDataType; - } - - /// A URI fragment specifying the type of data change detection policy. - internal string ODataType { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DataDeletionDetectionPolicy.Serialization.cs deleted file mode 100644 index 89318fb0f5c4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataDeletionDetectionPolicy.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DataDeletionDetectionPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static DataDeletionDetectionPolicy DeserializeDataDeletionDetectionPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy": return NativeBlobSoftDeleteDeletionDetectionPolicy.DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(element); - case "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy": return SoftDeleteColumnDeletionDetectionPolicy.DeserializeSoftDeleteColumnDeletionDetectionPolicy(element); - } - } - return UnknownDataDeletionDetectionPolicy.DeserializeUnknownDataDeletionDetectionPolicy(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static DataDeletionDetectionPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDataDeletionDetectionPolicy(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DataDeletionDetectionPolicy.cs deleted file mode 100644 index b586a85a237e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataDeletionDetectionPolicy.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for data deletion detection policies. 
- /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public partial class DataDeletionDetectionPolicy - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of data deletion detection policy. - internal DataDeletionDetectionPolicy(string oDataType) - { - ODataType = oDataType; - } - - /// A URI fragment specifying the type of data deletion detection policy. - internal string ODataType { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataSourceCredentials.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DataSourceCredentials.Serialization.cs deleted file mode 100644 index 5a6f42b2660c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataSourceCredentials.Serialization.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class DataSourceCredentials : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ConnectionString)) - { - writer.WritePropertyName("connectionString"u8); - writer.WriteStringValue(ConnectionString); - } - writer.WriteEndObject(); - } - - internal static DataSourceCredentials DeserializeDataSourceCredentials(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string connectionString = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("connectionString"u8)) - { - connectionString = property.Value.GetString(); - continue; - } - } - return new DataSourceCredentials(connectionString); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static DataSourceCredentials FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDataSourceCredentials(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataSourceCredentials.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DataSourceCredentials.cs deleted file mode 100644 index 62c92b834ab5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DataSourceCredentials.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents credentials that can be used to connect to a datasource. - internal partial class DataSourceCredentials - { - /// Initializes a new instance of . - public DataSourceCredentials() - { - } - - /// Initializes a new instance of . - /// The connection string for the datasource. Set to `<unchanged>` (with brackets) if you don't want the connection string updated. 
Set to `<redacted>` if you want to remove the connection string value from the datasource. - internal DataSourceCredentials(string connectionString) - { - ConnectionString = connectionString; - } - - /// The connection string for the datasource. Set to `<unchanged>` (with brackets) if you don't want the connection string updated. Set to `<redacted>` if you want to remove the connection string value from the datasource. - public string ConnectionString { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DebugInfo.Serialization.cs deleted file mode 100644 index 61dde3059701..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DebugInfo.Serialization.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class DebugInfo - { - internal static DebugInfo DeserializeDebugInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - QueryRewritesDebugInfo queryRewrites = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("queryRewrites"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - queryRewrites = QueryRewritesDebugInfo.DeserializeQueryRewritesDebugInfo(property.Value); - continue; - } - } - return new DebugInfo(queryRewrites); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static DebugInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDebugInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DebugInfo.cs deleted file mode 100644 index b50ff0c0537f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DebugInfo.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// Contains debugging information that can be used to further explore your search results. - public partial class DebugInfo - { - /// Initializes a new instance of . - internal DebugInfo() - { - } - - /// Initializes a new instance of . - /// Contains debugging information specific to query rewrites. - internal DebugInfo(QueryRewritesDebugInfo queryRewrites) - { - QueryRewrites = queryRewrites; - } - - /// Contains debugging information specific to query rewrites. - public QueryRewritesDebugInfo QueryRewrites { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DefaultCognitiveServicesAccount.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DefaultCognitiveServicesAccount.Serialization.cs deleted file mode 100644 index b9416c718edd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DefaultCognitiveServicesAccount.Serialization.cs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DefaultCognitiveServicesAccount : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WriteEndObject(); - } - - internal static DefaultCognitiveServicesAccount DeserializeDefaultCognitiveServicesAccount(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = default; - string description = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - } - return new DefaultCognitiveServicesAccount(odataType, description); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new DefaultCognitiveServicesAccount FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDefaultCognitiveServicesAccount(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DefaultCognitiveServicesAccount.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DefaultCognitiveServicesAccount.cs deleted file mode 100644 index 4004250759df..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DefaultCognitiveServicesAccount.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// An empty object that represents the default Azure AI service resource for a skillset. - public partial class DefaultCognitiveServicesAccount : CognitiveServicesAccount - { - /// Initializes a new instance of . - public DefaultCognitiveServicesAccount() - { - ODataType = "#Microsoft.Azure.Search.DefaultCognitiveServices"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of Azure AI service resource attached to a skillset. - /// Description of the Azure AI service resource attached to a skillset. - internal DefaultCognitiveServicesAccount(string oDataType, string description) : base(oDataType, description) - { - ODataType = oDataType ?? "#Microsoft.Azure.Search.DefaultCognitiveServices"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DictionaryDecompounderTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DictionaryDecompounderTokenFilter.Serialization.cs deleted file mode 100644 index 912a886e7b16..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DictionaryDecompounderTokenFilter.Serialization.cs +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DictionaryDecompounderTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("wordList"u8); - writer.WriteStartArray(); - foreach (var item in WordList) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(MinWordSize)) - { - writer.WritePropertyName("minWordSize"u8); - writer.WriteNumberValue(MinWordSize.Value); - } - if (Optional.IsDefined(MinSubwordSize)) - { - writer.WritePropertyName("minSubwordSize"u8); - writer.WriteNumberValue(MinSubwordSize.Value); - } - if (Optional.IsDefined(MaxSubwordSize)) - { - writer.WritePropertyName("maxSubwordSize"u8); - writer.WriteNumberValue(MaxSubwordSize.Value); - } - if (Optional.IsDefined(OnlyLongestMatch)) - { - writer.WritePropertyName("onlyLongestMatch"u8); - writer.WriteBooleanValue(OnlyLongestMatch.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static DictionaryDecompounderTokenFilter DeserializeDictionaryDecompounderTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList wordList = default; - int? minWordSize = default; - int? minSubwordSize = default; - int? maxSubwordSize = default; - bool? onlyLongestMatch = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("wordList"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - wordList = array; - continue; - } - if (property.NameEquals("minWordSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - minWordSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("minSubwordSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - minSubwordSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxSubwordSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxSubwordSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("onlyLongestMatch"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - onlyLongestMatch = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new DictionaryDecompounderTokenFilter( - odataType, - name, - wordList, - minWordSize, - minSubwordSize, - maxSubwordSize, - onlyLongestMatch); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new DictionaryDecompounderTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDictionaryDecompounderTokenFilter(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DictionaryDecompounderTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DictionaryDecompounderTokenFilter.cs deleted file mode 100644 index d8d49736c211..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DictionaryDecompounderTokenFilter.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Decomposes compound words found in many Germanic languages. This token filter is implemented using Apache Lucene. - public partial class DictionaryDecompounderTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The list of words to match against. - /// or is null. - public DictionaryDecompounderTokenFilter(string name, IEnumerable wordList) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(wordList, nameof(wordList)); - - WordList = wordList.ToList(); - ODataType = "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The list of words to match against. - /// The minimum word size. Only words longer than this get processed. Default is 5. Maximum is 300. - /// The minimum subword size. Only subwords longer than this are outputted. Default is 2. Maximum is 300. - /// The maximum subword size. Only subwords shorter than this are outputted. Default is 15. Maximum is 300. - /// A value indicating whether to add only the longest matching subword to the output. Default is false. - internal DictionaryDecompounderTokenFilter(string oDataType, string name, IList wordList, int? minWordSize, int? minSubwordSize, int? maxSubwordSize, bool? onlyLongestMatch) : base(oDataType, name) - { - WordList = wordList; - MinWordSize = minWordSize; - MinSubwordSize = minSubwordSize; - MaxSubwordSize = maxSubwordSize; - OnlyLongestMatch = onlyLongestMatch; - ODataType = oDataType ?? "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter"; - } - /// The minimum word size. Only words longer than this get processed. Default is 5. Maximum is 300. - public int? MinWordSize { get; set; } - /// The minimum subword size. Only subwords longer than this are outputted. Default is 2. Maximum is 300. - public int? MinSubwordSize { get; set; } - /// The maximum subword size. Only subwords shorter than this are outputted. Default is 15. Maximum is 300. - public int? MaxSubwordSize { get; set; } - /// A value indicating whether to add only the longest matching subword to the output. Default is false. - public bool? 
OnlyLongestMatch { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringFunction.Serialization.cs deleted file mode 100644 index 9db45fd0785e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringFunction.Serialization.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DistanceScoringFunction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("distance"u8); - writer.WriteObjectValue(Parameters); - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - writer.WritePropertyName("fieldName"u8); - writer.WriteStringValue(FieldName); - writer.WritePropertyName("boost"u8); - writer.WriteNumberValue(Boost); - if (Optional.IsDefined(Interpolation)) - { - writer.WritePropertyName("interpolation"u8); - writer.WriteStringValue(Interpolation.Value.ToSerialString()); - } - writer.WriteEndObject(); - } - - internal static DistanceScoringFunction DeserializeDistanceScoringFunction(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - DistanceScoringParameters distance = default; - string type = default; - string fieldName = default; - double boost = default; - ScoringFunctionInterpolation? interpolation = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("distance"u8)) - { - distance = DistanceScoringParameters.DeserializeDistanceScoringParameters(property.Value); - continue; - } - if (property.NameEquals("type"u8)) - { - type = property.Value.GetString(); - continue; - } - if (property.NameEquals("fieldName"u8)) - { - fieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("boost"u8)) - { - boost = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("interpolation"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - interpolation = property.Value.GetString().ToScoringFunctionInterpolation(); - continue; - } - } - return new DistanceScoringFunction(type, fieldName, boost, interpolation, distance); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new DistanceScoringFunction FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDistanceScoringFunction(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringFunction.cs deleted file mode 100644 index 1c6f26b8a4ac..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringFunction.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a function that boosts scores based on distance from a geographic location. - public partial class DistanceScoringFunction : ScoringFunction - { - /// Initializes a new instance of . - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - /// The name of the field used as input to the scoring function. - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - /// Parameter values for the distance scoring function. - internal DistanceScoringFunction(string type, string fieldName, double boost, ScoringFunctionInterpolation? interpolation, DistanceScoringParameters parameters) : base(type, fieldName, boost, interpolation) - { - Parameters = parameters; - Type = type ?? "distance"; - } - - /// Parameter values for the distance scoring function. - public DistanceScoringParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringParameters.Serialization.cs deleted file mode 100644 index 9eab56fbf6fa..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringParameters.Serialization.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DistanceScoringParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("referencePointParameter"u8); - writer.WriteStringValue(ReferencePointParameter); - writer.WritePropertyName("boostingDistance"u8); - writer.WriteNumberValue(BoostingDistance); - writer.WriteEndObject(); - } - - internal static DistanceScoringParameters DeserializeDistanceScoringParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string referencePointParameter = default; - double boostingDistance = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("referencePointParameter"u8)) - { - referencePointParameter = property.Value.GetString(); - continue; - } - if (property.NameEquals("boostingDistance"u8)) - { - boostingDistance = property.Value.GetDouble(); - continue; - } - } - return new DistanceScoringParameters(referencePointParameter, boostingDistance); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static DistanceScoringParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDistanceScoringParameters(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringParameters.cs deleted file mode 100644 index 9269044aa6fd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DistanceScoringParameters.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Provides parameter values to a distance scoring function. - public partial class DistanceScoringParameters - { - /// Initializes a new instance of . - /// The name of the parameter passed in search queries to specify the reference location. - /// The distance in kilometers from the reference location where the boosting range ends. - /// is null. - public DistanceScoringParameters(string referencePointParameter, double boostingDistance) - { - Argument.AssertNotNull(referencePointParameter, nameof(referencePointParameter)); - - ReferencePointParameter = referencePointParameter; - BoostingDistance = boostingDistance; - } - - /// The name of the parameter passed in search queries to specify the reference location. - public string ReferencePointParameter { get; set; } - /// The distance in kilometers from the reference location where the boosting range ends. - public double BoostingDistance { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentDebugInfo.Serialization.cs deleted file mode 100644 index dcee507c5a2c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentDebugInfo.Serialization.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class DocumentDebugInfo - { - internal static DocumentDebugInfo DeserializeDocumentDebugInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SemanticDebugInfo semantic = default; - VectorsDebugInfo vectors = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("semantic"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - semantic = SemanticDebugInfo.DeserializeSemanticDebugInfo(property.Value); - continue; - } - if (property.NameEquals("vectors"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - vectors = VectorsDebugInfo.DeserializeVectorsDebugInfo(property.Value); - continue; - } - } - return new DocumentDebugInfo(semantic, vectors); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static DocumentDebugInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDocumentDebugInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentDebugInfo.cs deleted file mode 100644 index ca41ce4d1962..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentDebugInfo.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// Contains debugging information that can be used to further explore your search results. - public partial class DocumentDebugInfo - { - /// Initializes a new instance of . - internal DocumentDebugInfo() - { - } - - /// Initializes a new instance of . - /// Contains debugging information specific to semantic ranking requests. - /// Contains debugging information specific to vector and hybrid search. - internal DocumentDebugInfo(SemanticDebugInfo semantic, VectorsDebugInfo vectors) - { - Semantic = semantic; - Vectors = vectors; - } - - /// Contains debugging information specific to semantic ranking requests. - public SemanticDebugInfo Semantic { get; } - /// Contains debugging information specific to vector and hybrid search. - public VectorsDebugInfo Vectors { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentExtractionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentExtractionSkill.Serialization.cs deleted file mode 100644 index 231d8b13ea9b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentExtractionSkill.Serialization.cs +++ /dev/null @@ -1,228 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DocumentExtractionSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ParsingMode)) - { - if (ParsingMode != null) - { - writer.WritePropertyName("parsingMode"u8); - writer.WriteStringValue(ParsingMode.Value.ToString()); - } - else - { - writer.WriteNull("parsingMode"); - } - } - if (Optional.IsDefined(DataToExtract)) - { - if (DataToExtract != null) - { - writer.WritePropertyName("dataToExtract"u8); - writer.WriteStringValue(DataToExtract.Value.ToString()); - } - else - { - writer.WriteNull("dataToExtract"); - } - } - if (Optional.IsCollectionDefined(Configuration)) - { - if (Configuration != null) - { - writer.WritePropertyName("configuration"u8); - writer.WriteStartObject(); - foreach (var item in Configuration) - { - writer.WritePropertyName(item.Key); - if (item.Value == null) - { - writer.WriteNullValue(); - continue; - } - writer.WriteObjectValue(item.Value); - } - writer.WriteEndObject(); - } - else - { - writer.WriteNull("configuration"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static DocumentExtractionSkill DeserializeDocumentExtractionSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - BlobIndexerParsingMode? parsingMode = default; - BlobIndexerDataToExtract? 
dataToExtract = default; - IDictionary configuration = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("parsingMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - parsingMode = null; - continue; - } - parsingMode = new BlobIndexerParsingMode(property.Value.GetString()); - continue; - } - if (property.NameEquals("dataToExtract"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - dataToExtract = null; - continue; - } - dataToExtract = new BlobIndexerDataToExtract(property.Value.GetString()); - continue; - } - if (property.NameEquals("configuration"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - configuration = null; - continue; - } - Dictionary dictionary = new Dictionary(); - foreach (var property0 in property.Value.EnumerateObject()) - { - if (property0.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(property0.Name, null); - } - else - { - dictionary.Add(property0.Name, property0.Value.GetObject()); - } - } - configuration = dictionary; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new DocumentExtractionSkill( - odataType, - name, - description, - context, - inputs, - outputs, - parsingMode, - dataToExtract, - configuration ?? new ChangeTrackingDictionary()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new DocumentExtractionSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDocumentExtractionSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentExtractionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentExtractionSkill.cs deleted file mode 100644 index 089347fe6255..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentExtractionSkill.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that extracts content from a file within the enrichment pipeline. - public partial class DocumentExtractionSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public DocumentExtractionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - Configuration = new ChangeTrackingDictionary(); - ODataType = "#Microsoft.Skills.Util.DocumentExtractionSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The parsingMode for the skill. Will be set to 'default' if not defined. - /// The type of data to be extracted for the skill. Will be set to 'contentAndMetadata' if not defined. - /// A dictionary of configurations for the skill. - internal DocumentExtractionSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, BlobIndexerParsingMode? parsingMode, BlobIndexerDataToExtract? dataToExtract, IDictionary configuration) : base(oDataType, name, description, context, inputs, outputs) - { - ParsingMode = parsingMode; - DataToExtract = dataToExtract; - Configuration = configuration; - ODataType = oDataType ?? "#Microsoft.Skills.Util.DocumentExtractionSkill"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkill.Serialization.cs deleted file mode 100644 index 91f1e56a192c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkill.Serialization.cs +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class DocumentIntelligenceLayoutSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(OutputMode)) - { - if (OutputMode != null) - { - writer.WritePropertyName("outputMode"u8); - writer.WriteStringValue(OutputMode.Value.ToString()); - } - else - { - writer.WriteNull("outputMode"); - } - } - if (Optional.IsDefined(MarkdownHeaderDepth)) - { - if (MarkdownHeaderDepth != null) - { - writer.WritePropertyName("markdownHeaderDepth"u8); - writer.WriteStringValue(MarkdownHeaderDepth.Value.ToString()); - } - else - { - writer.WriteNull("markdownHeaderDepth"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static DocumentIntelligenceLayoutSkill DeserializeDocumentIntelligenceLayoutSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - DocumentIntelligenceLayoutSkillOutputMode? outputMode = default; - DocumentIntelligenceLayoutSkillMarkdownHeaderDepth? 
markdownHeaderDepth = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("outputMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - outputMode = null; - continue; - } - outputMode = new DocumentIntelligenceLayoutSkillOutputMode(property.Value.GetString()); - continue; - } - if (property.NameEquals("markdownHeaderDepth"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - markdownHeaderDepth = null; - continue; - } - markdownHeaderDepth = new DocumentIntelligenceLayoutSkillMarkdownHeaderDepth(property.Value.GetString()); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new DocumentIntelligenceLayoutSkill( - odataType, - name, - description, - context, - inputs, - outputs, - outputMode, - markdownHeaderDepth); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new DocumentIntelligenceLayoutSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeDocumentIntelligenceLayoutSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkill.cs deleted file mode 100644 index 75eafce1323a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/DocumentIntelligenceLayoutSkill.cs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that extracts content and layout information (as markdown), via Azure AI Services, from files within the enrichment pipeline. - public partial class DocumentIntelligenceLayoutSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. 
- public DocumentIntelligenceLayoutSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. - /// The depth of headers in the markdown output. Default is h6. - internal DocumentIntelligenceLayoutSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, DocumentIntelligenceLayoutSkillOutputMode? outputMode, DocumentIntelligenceLayoutSkillMarkdownHeaderDepth? markdownHeaderDepth) : base(oDataType, name, description, context, inputs, outputs) - { - OutputMode = outputMode; - MarkdownHeaderDepth = markdownHeaderDepth; - ODataType = oDataType ?? "#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill"; - } - - /// Controls the cardinality of the output produced by the skill. Default is 'oneToMany'. - public DocumentIntelligenceLayoutSkillOutputMode? OutputMode { get; set; } - /// The depth of headers in the markdown output. Default is h6. - public DocumentIntelligenceLayoutSkillMarkdownHeaderDepth? MarkdownHeaderDepth { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilter.Serialization.cs deleted file mode 100644 index cfcac905fb74..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilter.Serialization.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class EdgeNGramTokenFilter : IUtf8JsonSerializable - { - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new EdgeNGramTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeEdgeNGramTokenFilter(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilter.cs deleted file mode 100644 index 72465c201465..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilter.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Generates n-grams of the given size(s) starting from the front or the back of an input token. This token filter is implemented using Apache Lucene. - public partial class EdgeNGramTokenFilter : TokenFilter - { - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilterSide.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilterSide.Serialization.cs deleted file mode 100644 index 608f2d2dd27f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilterSide.Serialization.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class EdgeNGramTokenFilterSideExtensions - { - public static string ToSerialString(this EdgeNGramTokenFilterSide value) => value switch - { - EdgeNGramTokenFilterSide.Front => "front", - EdgeNGramTokenFilterSide.Back => "back", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown EdgeNGramTokenFilterSide value.") - }; - - public static EdgeNGramTokenFilterSide ToEdgeNGramTokenFilterSide(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "front")) return EdgeNGramTokenFilterSide.Front; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "back")) return EdgeNGramTokenFilterSide.Back; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown EdgeNGramTokenFilterSide value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilterSide.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilterSide.cs deleted file mode 100644 index 16f041ec7a65..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenFilterSide.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies which side of the input an n-gram should be generated from. - public enum EdgeNGramTokenFilterSide - { - /// Specifies that the n-gram should be generated from the front of the input. - Front, - /// Specifies that the n-gram should be generated from the back of the input. 
- Back - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenizer.Serialization.cs deleted file mode 100644 index 68ef5c425c2d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenizer.Serialization.cs +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class EdgeNGramTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MinGram)) - { - writer.WritePropertyName("minGram"u8); - writer.WriteNumberValue(MinGram.Value); - } - if (Optional.IsDefined(MaxGram)) - { - writer.WritePropertyName("maxGram"u8); - writer.WriteNumberValue(MaxGram.Value); - } - if (Optional.IsCollectionDefined(TokenChars)) - { - writer.WritePropertyName("tokenChars"u8); - writer.WriteStartArray(); - foreach (var item in TokenChars) - { - writer.WriteStringValue(item.ToSerialString()); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static EdgeNGramTokenizer DeserializeEdgeNGramTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? minGram = default; - int? maxGram = default; - IList tokenChars = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("minGram"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - minGram = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxGram"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxGram = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("tokenChars"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString().ToTokenCharacterKind()); - } - tokenChars = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new EdgeNGramTokenizer(odataType, name, minGram, maxGram, tokenChars ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new EdgeNGramTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeEdgeNGramTokenizer(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenizer.cs deleted file mode 100644 index b3472769bca9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EdgeNGramTokenizer.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Tokenizes the input from an edge into n-grams of the given size(s). This tokenizer is implemented using Apache Lucene. - public partial class EdgeNGramTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public EdgeNGramTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - TokenChars = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.EdgeNGramTokenizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. - /// The maximum n-gram length. Default is 2. Maximum is 300. - /// Character classes to keep in the tokens. - internal EdgeNGramTokenizer(string oDataType, string name, int? minGram, int? maxGram, IList tokenChars) : base(oDataType, name) - { - MinGram = minGram; - MaxGram = maxGram; - TokenChars = tokenChars; - ODataType = oDataType ?? "#Microsoft.Azure.Search.EdgeNGramTokenizer"; - } - - /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. - public int? MinGram { get; set; } - /// The maximum n-gram length. Default is 2. Maximum is 300. - public int? MaxGram { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ElisionTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ElisionTokenFilter.Serialization.cs deleted file mode 100644 index d35e656d79f8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ElisionTokenFilter.Serialization.cs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ElisionTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(Articles)) - { - writer.WritePropertyName("articles"u8); - writer.WriteStartArray(); - foreach (var item in Articles) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static ElisionTokenFilter DeserializeElisionTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList articles = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("articles"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - articles = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new ElisionTokenFilter(odataType, name, articles ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new ElisionTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeElisionTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ElisionTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ElisionTokenFilter.cs deleted file mode 100644 index 3379a97d3022..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ElisionTokenFilter.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). This token filter is implemented using Apache Lucene. - public partial class ElisionTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public ElisionTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Articles = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.ElisionTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. 
It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The set of articles to remove. - internal ElisionTokenFilter(string oDataType, string name, IList articles) : base(oDataType, name) - { - Articles = articles; - ODataType = oDataType ?? "#Microsoft.Azure.Search.ElisionTokenFilter"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityLinkingSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityLinkingSkill.Serialization.cs deleted file mode 100644 index 222c241adc31..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityLinkingSkill.Serialization.cs +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class EntityLinkingSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(MinimumPrecision)) - { - if (MinimumPrecision != null) - { - writer.WritePropertyName("minimumPrecision"u8); - writer.WriteNumberValue(MinimumPrecision.Value); - } - else - { - writer.WriteNull("minimumPrecision"); - } - } - if (Optional.IsDefined(ModelVersion)) - { - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static EntityLinkingSkill DeserializeEntityLinkingSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string defaultLanguageCode = default; - double? 
minimumPrecision = default; - string modelVersion = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = property.Value.GetString(); - continue; - } - if (property.NameEquals("minimumPrecision"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - minimumPrecision = null; - continue; - } - minimumPrecision = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new EntityLinkingSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - minimumPrecision, - modelVersion); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new EntityLinkingSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeEntityLinkingSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityLinkingSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityLinkingSkill.cs deleted file mode 100644 index e0d3093a29bf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityLinkingSkill.cs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Using the Text Analytics API, extracts linked entities from text. - public partial class EntityLinkingSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. 
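// A minimal usage sketch, assuming the public surface that fronts the generated
// ElisionTokenFilter above; the filter and analyzer names, and the article list, are
// illustrative values for French text.
using Azure.Search.Documents.Indexes.Models;

internal static class ElisionTokenFilterSketch
{
    public static void AddFrenchElision(SearchIndex index)
    {
        // Strip leading French articles such as "l'" and "d'" before indexing.
        index.TokenFilters.Add(new ElisionTokenFilter("french_elision")
        {
            Articles = { "l", "m", "t", "qu", "n", "s", "j", "d", "c" }
        });

        index.Analyzers.Add(new CustomAnalyzer("french_text", LexicalTokenizerName.Standard)
        {
            TokenFilters = { "french_elision", TokenFilterName.Lowercase }
        });
    }
}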
- /// or is null. - public EntityLinkingSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Text.V3.EntityLinkingSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - internal EntityLinkingSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, string defaultLanguageCode, double? minimumPrecision, string modelVersion) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - MinimumPrecision = minimumPrecision; - ModelVersion = modelVersion; - ODataType = oDataType ?? "#Microsoft.Skills.Text.V3.EntityLinkingSkill"; - } - - /// A value indicating which language code to use. Default is `en`. - public string DefaultLanguageCode { get; set; } - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - public double? MinimumPrecision { get; set; } - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - public string ModelVersion { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkill.Serialization.cs deleted file mode 100644 index 58b19d6f8530..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkill.Serialization.cs +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
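// A minimal usage sketch, assuming the public EntityLinkingSkill surface shown above; the
// source path, output name, and target field are hypothetical and depend on the skillset's
// index schema.
using Azure.Search.Documents.Indexes.Models;

internal static class EntityLinkingSkillSketch
{
    public static SearchIndexerSkill Build() =>
        new EntityLinkingSkill(
            inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
            outputs: new[] { new OutputFieldMappingEntry("entities") { TargetName = "linkedEntities" } })
        {
            Context = "/document",
            DefaultLanguageCode = "en",
            MinimumPrecision = 0.75
        };
}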
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class EntityRecognitionSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(Categories)) - { - writer.WritePropertyName("categories"u8); - writer.WriteStartArray(); - foreach (var item in Categories) - { - writer.WriteStringValue(item.ToString()); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(IncludeTypelessEntities)) - { - if (IncludeTypelessEntities != null) - { - writer.WritePropertyName("includeTypelessEntities"u8); - writer.WriteBooleanValue(IncludeTypelessEntities.Value); - } - else - { - writer.WriteNull("includeTypelessEntities"); - } - } - if (Optional.IsDefined(MinimumPrecision)) - { - if (MinimumPrecision != null) - { - writer.WritePropertyName("minimumPrecision"u8); - writer.WriteNumberValue(MinimumPrecision.Value); - } - else - { - writer.WriteNull("minimumPrecision"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static EntityRecognitionSkill DeserializeEntityRecognitionSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList categories = default; - EntityRecognitionSkillLanguage? defaultLanguageCode = default; - bool? includeTypelessEntities = default; - double? 
minimumPrecision = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("categories"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(new EntityCategory(item.GetString())); - } - categories = array; - continue; - } - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = new EntityRecognitionSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("includeTypelessEntities"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - includeTypelessEntities = null; - continue; - } - includeTypelessEntities = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("minimumPrecision"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - minimumPrecision = null; - continue; - } - minimumPrecision = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new EntityRecognitionSkill( - odataType, - name, - description, - context, - inputs, - outputs, - categories ?? new ChangeTrackingList(), - defaultLanguageCode, - includeTypelessEntities, - minimumPrecision); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new EntityRecognitionSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeEntityRecognitionSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkill.cs deleted file mode 100644 index 958483cd4a8c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkill.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// This skill is deprecated. Use the V3.EntityRecognitionSkill instead. - public partial class EntityRecognitionSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A list of entity categories that should be extracted. - /// A value indicating which language code to use. Default is `en`. - /// Determines whether or not to include entities which are well known but don't conform to a pre-defined type. If this configuration is not set (default), set to null or set to false, entities which don't conform to one of the pre-defined types will not be surfaced. - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - internal EntityRecognitionSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, IList categories, EntityRecognitionSkillLanguage? defaultLanguageCode, bool? includeTypelessEntities, double? minimumPrecision) : base(oDataType, name, description, context, inputs, outputs) - { - Categories = categories; - DefaultLanguageCode = defaultLanguageCode; - IncludeTypelessEntities = includeTypelessEntities; - MinimumPrecision = minimumPrecision; - ODataType = oDataType ?? "#Microsoft.Skills.Text.EntityRecognitionSkill"; - } - /// A value indicating which language code to use. Default is `en`. - public EntityRecognitionSkillLanguage? DefaultLanguageCode { get; set; } - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - public double? MinimumPrecision { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillV3.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillV3.Serialization.cs deleted file mode 100644 index 786ee4af269a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillV3.Serialization.cs +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
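// A minimal usage sketch: the generated type above is the deprecated V1 shape, and its doc
// comment points callers to the V3 skill. Assuming the hand-written EntityRecognitionSkill
// overload that takes EntityRecognitionSkill.SkillVersion (an assumption about the public
// surface), a V3 skill can be declared like this; field and output names are hypothetical.
using Azure.Search.Documents.Indexes.Models;

internal static class EntityRecognitionSkillSketch
{
    public static SearchIndexerSkill Build()
    {
        var skill = new EntityRecognitionSkill(
            new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
            new[] { new OutputFieldMappingEntry("organizations") { TargetName = "orgNames" } },
            EntityRecognitionSkill.SkillVersion.V3)
        {
            DefaultLanguageCode = EntityRecognitionSkillLanguage.En,
            MinimumPrecision = 0.5
        };
        skill.Categories.Add(EntityCategory.Organization);
        return skill;
    }
}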
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class EntityRecognitionSkillV3 : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(Categories)) - { - writer.WritePropertyName("categories"u8); - writer.WriteStartArray(); - foreach (var item in Categories) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(MinimumPrecision)) - { - if (MinimumPrecision != null) - { - writer.WritePropertyName("minimumPrecision"u8); - writer.WriteNumberValue(MinimumPrecision.Value); - } - else - { - writer.WriteNull("minimumPrecision"); - } - } - if (Optional.IsDefined(ModelVersion)) - { - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static EntityRecognitionSkillV3 DeserializeEntityRecognitionSkillV3(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList categories = default; - string defaultLanguageCode = default; - double? 
minimumPrecision = default; - string modelVersion = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("categories"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - categories = array; - continue; - } - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = property.Value.GetString(); - continue; - } - if (property.NameEquals("minimumPrecision"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - minimumPrecision = null; - continue; - } - minimumPrecision = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new EntityRecognitionSkillV3( - odataType, - name, - description, - context, - inputs, - outputs, - categories ?? new ChangeTrackingList(), - defaultLanguageCode, - minimumPrecision, - modelVersion); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new EntityRecognitionSkillV3 FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeEntityRecognitionSkillV3(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillV3.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillV3.cs deleted file mode 100644 index aea88cab205a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/EntityRecognitionSkillV3.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Using the Text Analytics API, extracts entities of different types from text. - internal partial class EntityRecognitionSkillV3 : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public EntityRecognitionSkillV3(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - Categories = new ChangeTrackingList(); - ODataType = "#Microsoft.Skills.Text.V3.EntityRecognitionSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A list of entity categories that should be extracted. - /// A value indicating which language code to use. Default is `en`. - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - /// The version of the model to use when calling the Text Analytics API. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - internal EntityRecognitionSkillV3(string oDataType, string name, string description, string context, IList inputs, IList outputs, IList categories, string defaultLanguageCode, double? minimumPrecision, string modelVersion) : base(oDataType, name, description, context, inputs, outputs) - { - Categories = categories; - DefaultLanguageCode = defaultLanguageCode; - MinimumPrecision = minimumPrecision; - ModelVersion = modelVersion; - ODataType = oDataType ?? "#Microsoft.Skills.Text.V3.EntityRecognitionSkill"; - } - - /// A list of entity categories that should be extracted. - public IList Categories { get; } - /// A value indicating which language code to use. Default is `en`. - public string DefaultLanguageCode { get; set; } - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - public double? MinimumPrecision { get; set; } - /// The version of the model to use when calling the Text Analytics API. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. 
- public string ModelVersion { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorAdditionalInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorAdditionalInfo.Serialization.cs deleted file mode 100644 index 79d308cd886a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorAdditionalInfo.Serialization.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - internal partial class ErrorAdditionalInfo - { - internal static ErrorAdditionalInfo DeserializeErrorAdditionalInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string type = default; - object info = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("type"u8)) - { - type = property.Value.GetString(); - continue; - } - if (property.NameEquals("info"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - info = property.Value.GetObject(); - continue; - } - } - return new ErrorAdditionalInfo(type, info); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ErrorAdditionalInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeErrorAdditionalInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorAdditionalInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorAdditionalInfo.cs deleted file mode 100644 index b8f219dc1c82..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorAdditionalInfo.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The resource management error additional info. - internal partial class ErrorAdditionalInfo - { - /// Initializes a new instance of . - internal ErrorAdditionalInfo() - { - } - - /// Initializes a new instance of . - /// The additional info type. - /// The additional info. - internal ErrorAdditionalInfo(string type, object info) - { - Type = type; - Info = info; - } - - /// The additional info type. - public string Type { get; } - /// The additional info. - public object Info { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorDetail.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorDetail.Serialization.cs deleted file mode 100644 index 0ad29ada29cd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorDetail.Serialization.cs +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
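// A minimal deployment sketch, assuming the SearchIndexerClient surface from
// Azure.Search.Documents.Indexes; the endpoint, admin key, and skillset name are placeholders.
// A skill such as the entity recognition skill sketched above is deployed by placing it in a
// SearchIndexerSkillset.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

internal static class SkillsetSketch
{
    public static async Task CreateAsync(SearchIndexerSkill entitySkill)
    {
        var indexerClient = new SearchIndexerClient(
            new Uri("https://<service-name>.search.windows.net"),
            new AzureKeyCredential("<admin-api-key>"));

        var skillset = new SearchIndexerSkillset("entity-skillset", new[] { entitySkill })
        {
            Description = "Runs entity extraction over /document/content."
        };

        await indexerClient.CreateOrUpdateSkillsetAsync(skillset);
    }
}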
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - internal partial class ErrorDetail - { - internal static ErrorDetail DeserializeErrorDetail(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string code = default; - string message = default; - string target = default; - IReadOnlyList details = default; - IReadOnlyList additionalInfo = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("code"u8)) - { - code = property.Value.GetString(); - continue; - } - if (property.NameEquals("message"u8)) - { - message = property.Value.GetString(); - continue; - } - if (property.NameEquals("target"u8)) - { - target = property.Value.GetString(); - continue; - } - if (property.NameEquals("details"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(DeserializeErrorDetail(item)); - } - details = array; - continue; - } - if (property.NameEquals("additionalInfo"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(ErrorAdditionalInfo.DeserializeErrorAdditionalInfo(item)); - } - additionalInfo = array; - continue; - } - } - return new ErrorDetail(code, message, target, details ?? new ChangeTrackingList(), additionalInfo ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ErrorDetail FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeErrorDetail(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorDetail.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorDetail.cs deleted file mode 100644 index 9f804b9ebdab..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorDetail.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// The error detail. - internal partial class ErrorDetail - { - /// Initializes a new instance of . - internal ErrorDetail() - { - Details = new ChangeTrackingList(); - AdditionalInfo = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The error code. - /// The error message. - /// The error target. - /// The error details. - /// The error additional info. - internal ErrorDetail(string code, string message, string target, IReadOnlyList details, IReadOnlyList additionalInfo) - { - Code = code; - Message = message; - Target = target; - Details = details; - AdditionalInfo = additionalInfo; - } - - /// The error code. - public string Code { get; } - /// The error message. - public string Message { get; } - /// The error target. - public string Target { get; } - /// The error details. - public IReadOnlyList Details { get; } - /// The error additional info. 
- public IReadOnlyList AdditionalInfo { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorResponse.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorResponse.Serialization.cs deleted file mode 100644 index c145daf329a3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorResponse.Serialization.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - internal partial class ErrorResponse - { - internal static ErrorResponse DeserializeErrorResponse(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - ErrorDetail error = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("error"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - error = ErrorDetail.DeserializeErrorDetail(property.Value); - continue; - } - } - return new ErrorResponse(error); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ErrorResponse FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeErrorResponse(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorResponse.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorResponse.cs deleted file mode 100644 index b7e178bb8942..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ErrorResponse.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). - internal partial class ErrorResponse - { - /// Initializes a new instance of . - internal ErrorResponse() - { - } - - /// Initializes a new instance of . - /// The error object. - internal ErrorResponse(ErrorDetail error) - { - Error = error; - } - - /// The error object. - public ErrorDetail Error { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnAlgorithmConfiguration.Serialization.cs deleted file mode 100644 index c6e1f533bb8a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnAlgorithmConfiguration.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ExhaustiveKnnAlgorithmConfiguration : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Parameters)) - { - writer.WritePropertyName("exhaustiveKnnParameters"u8); - writer.WriteObjectValue(Parameters); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static ExhaustiveKnnAlgorithmConfiguration DeserializeExhaustiveKnnAlgorithmConfiguration(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - ExhaustiveKnnParameters exhaustiveKnnParameters = default; - string name = default; - VectorSearchAlgorithmKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("exhaustiveKnnParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - exhaustiveKnnParameters = ExhaustiveKnnParameters.DeserializeExhaustiveKnnParameters(property.Value); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchAlgorithmKind(property.Value.GetString()); - continue; - } - } - return new ExhaustiveKnnAlgorithmConfiguration(name, kind, exhaustiveKnnParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new ExhaustiveKnnAlgorithmConfiguration FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeExhaustiveKnnAlgorithmConfiguration(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnAlgorithmConfiguration.cs deleted file mode 100644 index 4cc2710a1980..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnAlgorithmConfiguration.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains configuration options specific to the exhaustive KNN algorithm used during querying, which will perform brute-force search across the entire vector index. - public partial class ExhaustiveKnnAlgorithmConfiguration : VectorSearchAlgorithmConfiguration - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// is null. - public ExhaustiveKnnAlgorithmConfiguration(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Kind = VectorSearchAlgorithmKind.ExhaustiveKnn; - } - - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of algorithm being configured for use with vector search. 
- /// Contains the parameters specific to exhaustive KNN algorithm. - internal ExhaustiveKnnAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind, ExhaustiveKnnParameters parameters) : base(name, kind) - { - Parameters = parameters; - Kind = kind; - } - - /// Contains the parameters specific to exhaustive KNN algorithm. - public ExhaustiveKnnParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnParameters.Serialization.cs deleted file mode 100644 index a70600ccdedf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnParameters.Serialization.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ExhaustiveKnnParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Metric)) - { - if (Metric != null) - { - writer.WritePropertyName("metric"u8); - writer.WriteStringValue(Metric.Value.ToString()); - } - else - { - writer.WriteNull("metric"); - } - } - writer.WriteEndObject(); - } - - internal static ExhaustiveKnnParameters DeserializeExhaustiveKnnParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - VectorSearchAlgorithmMetric? metric = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("metric"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - metric = null; - continue; - } - metric = new VectorSearchAlgorithmMetric(property.Value.GetString()); - continue; - } - } - return new ExhaustiveKnnParameters(metric); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ExhaustiveKnnParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeExhaustiveKnnParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnParameters.cs deleted file mode 100644 index a8d59ff3d16a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ExhaustiveKnnParameters.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains the parameters specific to exhaustive KNN algorithm. - public partial class ExhaustiveKnnParameters - { - /// Initializes a new instance of . - public ExhaustiveKnnParameters() - { - } - - /// Initializes a new instance of . - /// The similarity metric to use for vector comparisons. - internal ExhaustiveKnnParameters(VectorSearchAlgorithmMetric? metric) - { - Metric = metric; - } - - /// The similarity metric to use for vector comparisons. 
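// A minimal configuration sketch, assuming the public vector search surface that fronts the
// generated ExhaustiveKnnAlgorithmConfiguration and ExhaustiveKnnParameters above; the index
// name, profile name, and 1536-dimension embedding field are hypothetical.
using Azure.Search.Documents.Indexes.Models;

internal static class ExhaustiveKnnSketch
{
    public static SearchIndex BuildVectorIndex()
    {
        var index = new SearchIndex("docs-vector")
        {
            VectorSearch = new VectorSearch
            {
                Algorithms =
                {
                    new ExhaustiveKnnAlgorithmConfiguration("eknn")
                    {
                        Parameters = new ExhaustiveKnnParameters { Metric = VectorSearchAlgorithmMetric.Cosine }
                    }
                },
                Profiles = { new VectorSearchProfile("eknn-profile", "eknn") }
            }
        };

        index.Fields.Add(new SimpleField("id", SearchFieldDataType.String) { IsKey = true });
        index.Fields.Add(new SearchField("embedding", SearchFieldDataType.Collection(SearchFieldDataType.Single))
        {
            IsSearchable = true,
            VectorSearchDimensions = 1536,
            VectorSearchProfileName = "eknn-profile"
        });
        return index;
    }
}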
- public VectorSearchAlgorithmMetric? Metric { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FacetResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FacetResult.Serialization.cs deleted file mode 100644 index 857b91bbd87d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FacetResult.Serialization.cs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class FacetResult - { - internal static FacetResult DeserializeFacetResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - long? count = default; - IReadOnlyDictionary> searchFacets = default; - IReadOnlyDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("count"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - count = property.Value.GetInt64(); - continue; - } - if (property.NameEquals("@search.facets"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - Dictionary> dictionary = new Dictionary>(); - foreach (var property0 in property.Value.EnumerateObject()) - { - if (property0.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(property0.Name, null); - } - else - { - List array = new List(); - foreach (var item in property0.Value.EnumerateArray()) - { - array.Add(DeserializeFacetResult(item)); - } - dictionary.Add(property0.Name, array); - } - } - searchFacets = dictionary; - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new FacetResult(count, searchFacets ?? new ChangeTrackingDictionary>(), additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static FacetResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeFacetResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FacetResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FacetResult.cs deleted file mode 100644 index fa48a3d346a9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FacetResult.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// A single bucket of a facet query result. Reports the number of documents with a field value falling within a particular range or having a particular value or interval. - public partial class FacetResult - { - /// Initializes a new instance of . - internal FacetResult() - { - Facets = new ChangeTrackingDictionary>(); - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// The approximate count of documents falling within the bucket described by this facet. 
- /// The nested facet query results for the search operation, organized as a collection of buckets for each faceted field; null if the query did not contain any nested facets. - /// Additional Properties. - internal FacetResult(long? count, IReadOnlyDictionary> facets, IReadOnlyDictionary additionalProperties) - { - Count = count; - Facets = facets; - AdditionalProperties = additionalProperties; - } - - /// The approximate count of documents falling within the bucket described by this facet. - public long? Count { get; } - /// The nested facet query results for the search operation, organized as a collection of buckets for each faceted field; null if the query did not contain any nested facets. - public IReadOnlyDictionary> Facets { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMapping.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMapping.Serialization.cs deleted file mode 100644 index ee00cb0d2229..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMapping.Serialization.cs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class FieldMapping : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("sourceFieldName"u8); - writer.WriteStringValue(SourceFieldName); - if (Optional.IsDefined(TargetFieldName)) - { - writer.WritePropertyName("targetFieldName"u8); - writer.WriteStringValue(TargetFieldName); - } - if (Optional.IsDefined(MappingFunction)) - { - if (MappingFunction != null) - { - writer.WritePropertyName("mappingFunction"u8); - writer.WriteObjectValue(MappingFunction); - } - else - { - writer.WriteNull("mappingFunction"); - } - } - writer.WriteEndObject(); - } - - internal static FieldMapping DeserializeFieldMapping(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string sourceFieldName = default; - string targetFieldName = default; - FieldMappingFunction mappingFunction = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("sourceFieldName"u8)) - { - sourceFieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("targetFieldName"u8)) - { - targetFieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("mappingFunction"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - mappingFunction = null; - continue; - } - mappingFunction = FieldMappingFunction.DeserializeFieldMappingFunction(property.Value); - continue; - } - } - return new FieldMapping(sourceFieldName, targetFieldName, mappingFunction); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static FieldMapping FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeFieldMapping(document.RootElement); - } - - /// Convert into a . 
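// A minimal consumption sketch, assuming the public SearchClient/SearchOptions surface and the
// convenience Value property on FacetResult (an assumption about the hand-written partial);
// "category" is a hypothetical facetable field.
using System;
using System.Threading.Tasks;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

internal static class FacetSketch
{
    public static async Task PrintCategoryFacetsAsync(SearchClient searchClient)
    {
        var options = new SearchOptions();
        options.Facets.Add("category,count:10");

        SearchResults<SearchDocument> results = await searchClient.SearchAsync<SearchDocument>("*", options);

        foreach (FacetResult facet in results.Facets["category"])
        {
            Console.WriteLine($"{facet.Value}: {facet.Count}");
        }
    }
}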
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMapping.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMapping.cs deleted file mode 100644 index ca65fd52d1be..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMapping.cs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a mapping between a field in a data source and a target field in an index. - public partial class FieldMapping - { - /// Initializes a new instance of . - /// The name of the field in the data source. - /// is null. - public FieldMapping(string sourceFieldName) - { - Argument.AssertNotNull(sourceFieldName, nameof(sourceFieldName)); - - SourceFieldName = sourceFieldName; - } - - /// Initializes a new instance of . - /// The name of the field in the data source. - /// The name of the target field in the index. Same as the source field name by default. - /// A function to apply to each source field value before indexing. - internal FieldMapping(string sourceFieldName, string targetFieldName, FieldMappingFunction mappingFunction) - { - SourceFieldName = sourceFieldName; - TargetFieldName = targetFieldName; - MappingFunction = mappingFunction; - } - - /// The name of the field in the data source. - public string SourceFieldName { get; set; } - /// The name of the target field in the index. Same as the source field name by default. - public string TargetFieldName { get; set; } - /// A function to apply to each source field value before indexing. - public FieldMappingFunction MappingFunction { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMappingFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMappingFunction.Serialization.cs deleted file mode 100644 index b92548ccc926..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMappingFunction.Serialization.cs +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class FieldMappingFunction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsCollectionDefined(Parameters)) - { - if (Parameters != null) - { - writer.WritePropertyName("parameters"u8); - writer.WriteStartObject(); - foreach (var item in Parameters) - { - writer.WritePropertyName(item.Key); - if (item.Value == null) - { - writer.WriteNullValue(); - continue; - } - writer.WriteObjectValue(item.Value); - } - writer.WriteEndObject(); - } - else - { - writer.WriteNull("parameters"); - } - } - writer.WriteEndObject(); - } - - internal static FieldMappingFunction DeserializeFieldMappingFunction(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - IDictionary parameters = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("parameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - parameters = null; - continue; - } - Dictionary dictionary = new Dictionary(); - foreach (var property0 in property.Value.EnumerateObject()) - { - if (property0.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(property0.Name, null); - } - else - { - dictionary.Add(property0.Name, property0.Value.GetObject()); - } - } - parameters = dictionary; - continue; - } - } - return new FieldMappingFunction(name, parameters ?? new ChangeTrackingDictionary()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static FieldMappingFunction FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeFieldMappingFunction(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMappingFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMappingFunction.cs deleted file mode 100644 index c9a14812ac0c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FieldMappingFunction.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents a function that transforms a value from a data source before indexing. - public partial class FieldMappingFunction - { - /// Initializes a new instance of . - /// The name of the field mapping function. - /// is null. - public FieldMappingFunction(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - Parameters = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// The name of the field mapping function. - /// A dictionary of parameter name/value pairs to pass to the function. Each value must be of a primitive type. 
- internal FieldMappingFunction(string name, IDictionary parameters) - { - Name = name; - Parameters = parameters; - } - - /// The name of the field mapping function. - public string Name { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringFunction.Serialization.cs deleted file mode 100644 index 49f28ed5089d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringFunction.Serialization.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class FreshnessScoringFunction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("freshness"u8); - writer.WriteObjectValue(Parameters); - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - writer.WritePropertyName("fieldName"u8); - writer.WriteStringValue(FieldName); - writer.WritePropertyName("boost"u8); - writer.WriteNumberValue(Boost); - if (Optional.IsDefined(Interpolation)) - { - writer.WritePropertyName("interpolation"u8); - writer.WriteStringValue(Interpolation.Value.ToSerialString()); - } - writer.WriteEndObject(); - } - - internal static FreshnessScoringFunction DeserializeFreshnessScoringFunction(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - FreshnessScoringParameters freshness = default; - string type = default; - string fieldName = default; - double boost = default; - ScoringFunctionInterpolation? interpolation = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("freshness"u8)) - { - freshness = FreshnessScoringParameters.DeserializeFreshnessScoringParameters(property.Value); - continue; - } - if (property.NameEquals("type"u8)) - { - type = property.Value.GetString(); - continue; - } - if (property.NameEquals("fieldName"u8)) - { - fieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("boost"u8)) - { - boost = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("interpolation"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - interpolation = property.Value.GetString().ToScoringFunctionInterpolation(); - continue; - } - } - return new FreshnessScoringFunction(type, fieldName, boost, interpolation, freshness); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new FreshnessScoringFunction FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeFreshnessScoringFunction(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringFunction.cs deleted file mode 100644 index d6f7f137659f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringFunction.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a function that boosts scores based on the value of a date-time field. - public partial class FreshnessScoringFunction : ScoringFunction - { - /// Initializes a new instance of . - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - /// The name of the field used as input to the scoring function. - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - /// Parameter values for the freshness scoring function. - internal FreshnessScoringFunction(string type, string fieldName, double boost, ScoringFunctionInterpolation? interpolation, FreshnessScoringParameters parameters) : base(type, fieldName, boost, interpolation) - { - Parameters = parameters; - Type = type ?? "freshness"; - } - - /// Parameter values for the freshness scoring function. - public FreshnessScoringParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringParameters.Serialization.cs deleted file mode 100644 index 5842580d72be..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringParameters.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class FreshnessScoringParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("boostingDuration"u8); - writer.WriteStringValue(BoostingDuration, "P"); - writer.WriteEndObject(); - } - - internal static FreshnessScoringParameters DeserializeFreshnessScoringParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - TimeSpan boostingDuration = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("boostingDuration"u8)) - { - boostingDuration = property.Value.GetTimeSpan("P"); - continue; - } - } - return new FreshnessScoringParameters(boostingDuration); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static FreshnessScoringParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeFreshnessScoringParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringParameters.cs deleted file mode 100644 index 721a5a41f499..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/FreshnessScoringParameters.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Provides parameter values to a freshness scoring function. - public partial class FreshnessScoringParameters - { - /// Initializes a new instance of . - /// The expiration period after which boosting will stop for a particular document. - public FreshnessScoringParameters(TimeSpan boostingDuration) - { - BoostingDuration = boostingDuration; - } - - /// The expiration period after which boosting will stop for a particular document. - public TimeSpan BoostingDuration { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HighWaterMarkChangeDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HighWaterMarkChangeDetectionPolicy.Serialization.cs deleted file mode 100644 index 1dc2bfd5cc64..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HighWaterMarkChangeDetectionPolicy.Serialization.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class HighWaterMarkChangeDetectionPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("highWaterMarkColumnName"u8); - writer.WriteStringValue(HighWaterMarkColumnName); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static HighWaterMarkChangeDetectionPolicy DeserializeHighWaterMarkChangeDetectionPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string highWaterMarkColumnName = default; - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("highWaterMarkColumnName"u8)) - { - highWaterMarkColumnName = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new HighWaterMarkChangeDetectionPolicy(odataType, highWaterMarkColumnName); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
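For context on the removed freshness scoring types: the boosting duration is serialized as an ISO-8601 period (the "P" format above), and newer documents are boosted until they age past it. A minimal sketch of a scoring profile built from these types, assuming the public ScoringProfile and FreshnessScoringFunction constructors; the profile name, field name, and boost value are illustrative.

using System;
using Azure.Search.Documents.Indexes.Models;

// Boost documents whose "lastRenovationDate" is within the last 365 days;
// the boost tapers off according to the function's interpolation (Linear by default).
var profile = new ScoringProfile("freshness-boost")
{
    Functions =
    {
        new FreshnessScoringFunction(
            "lastRenovationDate",
            2.0,
            new FreshnessScoringParameters(TimeSpan.FromDays(365)))
    }
};
// The profile is then added to SearchIndex.ScoringProfiles before the index is created or updated.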
- internal static new HighWaterMarkChangeDetectionPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeHighWaterMarkChangeDetectionPolicy(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HighWaterMarkChangeDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HighWaterMarkChangeDetectionPolicy.cs deleted file mode 100644 index 2e0e3f66f9ce..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HighWaterMarkChangeDetectionPolicy.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a data change detection policy that captures changes based on the value of a high water mark column. - public partial class HighWaterMarkChangeDetectionPolicy : DataChangeDetectionPolicy - { - /// Initializes a new instance of . - /// The name of the high water mark column. - /// is null. - public HighWaterMarkChangeDetectionPolicy(string highWaterMarkColumnName) - { - Argument.AssertNotNull(highWaterMarkColumnName, nameof(highWaterMarkColumnName)); - - HighWaterMarkColumnName = highWaterMarkColumnName; - ODataType = "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of data change detection policy. - /// The name of the high water mark column. - internal HighWaterMarkChangeDetectionPolicy(string oDataType, string highWaterMarkColumnName) : base(oDataType) - { - HighWaterMarkColumnName = highWaterMarkColumnName; - ODataType = oDataType ?? "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy"; - } - - /// The name of the high water mark column. - public string HighWaterMarkColumnName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswAlgorithmConfiguration.Serialization.cs deleted file mode 100644 index 2c63b205606d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswAlgorithmConfiguration.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
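A brief sketch of where the removed change detection policy is consumed: it is assigned to a data source connection so that incremental indexer runs only pick up rows whose high water mark column has advanced. The connection details, container name, and the "_ts" column are illustrative placeholders.

using Azure.Search.Documents.Indexes.Models;

var dataSource = new SearchIndexerDataSourceConnection(
    "hotels-datasource",
    SearchIndexerDataSourceType.CosmosDb,
    "<cosmos-connection-string>",          // placeholder connection string
    new SearchIndexerDataContainer("hotels"))
{
    // "_ts" is the Cosmos DB change timestamp; any monotonically increasing column works.
    DataChangeDetectionPolicy = new HighWaterMarkChangeDetectionPolicy("_ts")
};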
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class HnswAlgorithmConfiguration : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Parameters)) - { - writer.WritePropertyName("hnswParameters"u8); - writer.WriteObjectValue(Parameters); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static HnswAlgorithmConfiguration DeserializeHnswAlgorithmConfiguration(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - HnswParameters hnswParameters = default; - string name = default; - VectorSearchAlgorithmKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("hnswParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - hnswParameters = HnswParameters.DeserializeHnswParameters(property.Value); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchAlgorithmKind(property.Value.GetString()); - continue; - } - } - return new HnswAlgorithmConfiguration(name, kind, hnswParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new HnswAlgorithmConfiguration FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeHnswAlgorithmConfiguration(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswAlgorithmConfiguration.cs deleted file mode 100644 index 59aeb87cc763..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswAlgorithmConfiguration.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains configuration options specific to the HNSW approximate nearest neighbors algorithm used during indexing and querying. The HNSW algorithm offers a tunable trade-off between search speed and accuracy. - public partial class HnswAlgorithmConfiguration : VectorSearchAlgorithmConfiguration - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// is null. - public HnswAlgorithmConfiguration(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Kind = VectorSearchAlgorithmKind.Hnsw; - } - - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of algorithm being configured for use with vector search. - /// Contains the parameters specific to HNSW algorithm. 
- internal HnswAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind, HnswParameters parameters) : base(name, kind) - { - Parameters = parameters; - Kind = kind; - } - - /// Contains the parameters specific to HNSW algorithm. - public HnswParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswParameters.Serialization.cs deleted file mode 100644 index 1b2627741861..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswParameters.Serialization.cs +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class HnswParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(M)) - { - if (M != null) - { - writer.WritePropertyName("m"u8); - writer.WriteNumberValue(M.Value); - } - else - { - writer.WriteNull("m"); - } - } - if (Optional.IsDefined(EfConstruction)) - { - if (EfConstruction != null) - { - writer.WritePropertyName("efConstruction"u8); - writer.WriteNumberValue(EfConstruction.Value); - } - else - { - writer.WriteNull("efConstruction"); - } - } - if (Optional.IsDefined(EfSearch)) - { - if (EfSearch != null) - { - writer.WritePropertyName("efSearch"u8); - writer.WriteNumberValue(EfSearch.Value); - } - else - { - writer.WriteNull("efSearch"); - } - } - if (Optional.IsDefined(Metric)) - { - if (Metric != null) - { - writer.WritePropertyName("metric"u8); - writer.WriteStringValue(Metric.Value.ToString()); - } - else - { - writer.WriteNull("metric"); - } - } - writer.WriteEndObject(); - } - - internal static HnswParameters DeserializeHnswParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? m = default; - int? efConstruction = default; - int? efSearch = default; - VectorSearchAlgorithmMetric? metric = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("m"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - m = null; - continue; - } - m = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("efConstruction"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - efConstruction = null; - continue; - } - efConstruction = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("efSearch"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - efSearch = null; - continue; - } - efSearch = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("metric"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - metric = null; - continue; - } - metric = new VectorSearchAlgorithmMetric(property.Value.GetString()); - continue; - } - } - return new HnswParameters(m, efConstruction, efSearch, metric); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static HnswParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeHnswParameters(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswParameters.cs deleted file mode 100644 index 835c29c56e1f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HnswParameters.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains the parameters specific to the HNSW algorithm. - public partial class HnswParameters - { - /// Initializes a new instance of . - public HnswParameters() - { - } - - /// Initializes a new instance of . - /// The number of bi-directional links created for every new element during construction. Increasing this parameter value may improve recall and reduce retrieval times for datasets with high intrinsic dimensionality at the expense of increased memory consumption and longer indexing time. - /// The size of the dynamic list containing the nearest neighbors, which is used during index time. Increasing this parameter may improve index quality, at the expense of increased indexing time. At a certain point, increasing this parameter leads to diminishing returns. - /// The size of the dynamic list containing the nearest neighbors, which is used during search time. Increasing this parameter may improve search results, at the expense of slower search. At a certain point, increasing this parameter leads to diminishing returns. - /// The similarity metric to use for vector comparisons. - internal HnswParameters(int? m, int? efConstruction, int? efSearch, VectorSearchAlgorithmMetric? metric) - { - M = m; - EfConstruction = efConstruction; - EfSearch = efSearch; - Metric = metric; - } - - /// The number of bi-directional links created for every new element during construction. Increasing this parameter value may improve recall and reduce retrieval times for datasets with high intrinsic dimensionality at the expense of increased memory consumption and longer indexing time. - public int? M { get; set; } - /// The size of the dynamic list containing the nearest neighbors, which is used during index time. Increasing this parameter may improve index quality, at the expense of increased indexing time. At a certain point, increasing this parameter leads to diminishing returns. - public int? EfConstruction { get; set; } - /// The size of the dynamic list containing the nearest neighbors, which is used during search time. Increasing this parameter may improve search results, at the expense of slower search. At a certain point, increasing this parameter leads to diminishing returns. - public int? EfSearch { get; set; } - /// The similarity metric to use for vector comparisons. - public VectorSearchAlgorithmMetric? Metric { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridSearch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridSearch.Serialization.cs deleted file mode 100644 index cc92bae08693..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridSearch.Serialization.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
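To illustrate how the removed HNSW types fit together: the algorithm configuration is named, optionally carries an HnswParameters bag, and is referenced by a vector search profile on the index. A minimal sketch; the configuration name, profile name, and parameter values are illustrative.

using Azure.Search.Documents.Indexes.Models;

// Higher m/efConstruction improve recall at the cost of memory and indexing time;
// higher efSearch improves recall at the cost of query latency.
var vectorSearch = new VectorSearch
{
    Algorithms =
    {
        new HnswAlgorithmConfiguration("my-hnsw")
        {
            Parameters = new HnswParameters
            {
                M = 4,
                EfConstruction = 400,
                EfSearch = 500,
                Metric = VectorSearchAlgorithmMetric.Cosine
            }
        }
    },
    Profiles =
    {
        new VectorSearchProfile("my-profile", "my-hnsw")
    }
};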
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class HybridSearch : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxTextRecallSize)) - { - writer.WritePropertyName("maxTextRecallSize"u8); - writer.WriteNumberValue(MaxTextRecallSize.Value); - } - if (Optional.IsDefined(CountAndFacetMode)) - { - writer.WritePropertyName("countAndFacetMode"u8); - writer.WriteStringValue(CountAndFacetMode.Value.ToString()); - } - writer.WriteEndObject(); - } - - internal static HybridSearch DeserializeHybridSearch(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxTextRecallSize = default; - HybridCountAndFacetMode? countAndFacetMode = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxTextRecallSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTextRecallSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("countAndFacetMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - countAndFacetMode = new HybridCountAndFacetMode(property.Value.GetString()); - continue; - } - } - return new HybridSearch(maxTextRecallSize, countAndFacetMode); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static HybridSearch FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeHybridSearch(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridSearch.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridSearch.cs deleted file mode 100644 index 94476d175956..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/HybridSearch.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// TThe query parameters to configure hybrid search behaviors. - public partial class HybridSearch - { - /// Initializes a new instance of . - public HybridSearch() - { - } - - /// Initializes a new instance of . - /// Determines the maximum number of documents to be retrieved by the text query portion of a hybrid search request. Those documents will be combined with the documents matching the vector queries to produce a single final list of results. Choosing a larger maxTextRecallSize value will allow retrieving and paging through more documents (using the top and skip parameters), at the cost of higher resource utilization and higher latency. The value needs to be between 1 and 10,000. Default is 1000. - /// Determines whether the count and facets should includes all documents that matched the search query, or only the documents that are retrieved within the 'maxTextRecallSize' window. - internal HybridSearch(int? maxTextRecallSize, HybridCountAndFacetMode? 
countAndFacetMode) - { - MaxTextRecallSize = maxTextRecallSize; - CountAndFacetMode = countAndFacetMode; - } - - /// Determines the maximum number of documents to be retrieved by the text query portion of a hybrid search request. Those documents will be combined with the documents matching the vector queries to produce a single final list of results. Choosing a larger maxTextRecallSize value will allow retrieving and paging through more documents (using the top and skip parameters), at the cost of higher resource utilization and higher latency. The value needs to be between 1 and 10,000. Default is 1000. - public int? MaxTextRecallSize { get; set; } - /// Determines whether the count and facets should includes all documents that matched the search query, or only the documents that are retrieved within the 'maxTextRecallSize' window. - public HybridCountAndFacetMode? CountAndFacetMode { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkill.Serialization.cs deleted file mode 100644 index 69d45d2636c6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkill.Serialization.cs +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ImageAnalysisSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsCollectionDefined(VisualFeatures)) - { - writer.WritePropertyName("visualFeatures"u8); - writer.WriteStartArray(); - foreach (var item in VisualFeatures) - { - writer.WriteStringValue(item.ToString()); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Details)) - { - writer.WritePropertyName("details"u8); - writer.WriteStartArray(); - foreach (var item in Details) - { - writer.WriteStringValue(item.ToString()); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static ImageAnalysisSkill DeserializeImageAnalysisSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - ImageAnalysisSkillLanguage? 
defaultLanguageCode = default; - IList visualFeatures = default; - IList details = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = new ImageAnalysisSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("visualFeatures"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(new VisualFeature(item.GetString())); - } - visualFeatures = array; - continue; - } - if (property.NameEquals("details"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(new ImageDetail(item.GetString())); - } - details = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new ImageAnalysisSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - visualFeatures ?? new ChangeTrackingList(), - details ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new ImageAnalysisSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeImageAnalysisSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkill.cs deleted file mode 100644 index 8e1cfa6bedfb..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ImageAnalysisSkill.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that analyzes image files. It extracts a rich set of visual features based on the image content. 
- public partial class ImageAnalysisSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public ImageAnalysisSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - VisualFeatures = new ChangeTrackingList(); - Details = new ChangeTrackingList(); - ODataType = "#Microsoft.Skills.Vision.ImageAnalysisSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// A list of visual features. - /// A string indicating which domain-specific details to return. - internal ImageAnalysisSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, ImageAnalysisSkillLanguage? defaultLanguageCode, IList visualFeatures, IList details) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - VisualFeatures = visualFeatures; - Details = details; - ODataType = oDataType ?? "#Microsoft.Skills.Vision.ImageAnalysisSkill"; - } - - /// A value indicating which language code to use. Default is `en`. - public ImageAnalysisSkillLanguage? DefaultLanguageCode { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexAction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexAction.Serialization.cs deleted file mode 100644 index 12598dc14baf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexAction.Serialization.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - internal partial class IndexAction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ActionType)) - { - writer.WritePropertyName("@search.action"u8); - writer.WriteStringValue(ActionType.Value.ToSerialString()); - } - foreach (var item in AdditionalProperties) - { - writer.WritePropertyName(item.Key); - writer.WriteObjectValue(item.Value); - } - writer.WriteEndObject(); - } - - /// Convert into a . 
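As a usage note on the removed skill model: the skill consumes images extracted by the indexer and emits enriched fields that downstream skills or output field mappings can pick up. A minimal sketch, assuming the InputFieldMappingEntry and OutputFieldMappingEntry surface; the context path, target name, and feature selection are illustrative.

using Azure.Search.Documents.Indexes.Models;

var skill = new ImageAnalysisSkill(
    inputs: new[]
    {
        new InputFieldMappingEntry("image") { Source = "/document/normalized_images/*" }
    },
    outputs: new[]
    {
        new OutputFieldMappingEntry("description") { TargetName = "imageDescription" }
    })
{
    Context = "/document/normalized_images/*",
    DefaultLanguageCode = ImageAnalysisSkillLanguage.En,
    VisualFeatures = { VisualFeature.Description, VisualFeature.Tags }
};
// The skill is then added to a SearchIndexerSkillset alongside the other skills.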
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexAction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexAction.cs deleted file mode 100644 index 1cd6cda5d31c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexAction.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// Represents an index action that operates on a document. - internal partial class IndexAction - { - /// Initializes a new instance of . - public IndexAction() - { - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// The operation to perform on a document in an indexing batch. - /// Additional Properties. - internal IndexAction(IndexActionType? actionType, IDictionary additionalProperties) - { - ActionType = actionType; - AdditionalProperties = additionalProperties; - } - - /// The operation to perform on a document in an indexing batch. - public IndexActionType? ActionType { get; set; } - /// Additional Properties. - public IDictionary AdditionalProperties { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexActionType.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexActionType.Serialization.cs deleted file mode 100644 index fc7e47e966c1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexActionType.Serialization.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Models -{ - internal static partial class IndexActionTypeExtensions - { - public static string ToSerialString(this IndexActionType value) => value switch - { - IndexActionType.Upload => "upload", - IndexActionType.Merge => "merge", - IndexActionType.MergeOrUpload => "mergeOrUpload", - IndexActionType.Delete => "delete", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown IndexActionType value.") - }; - - public static IndexActionType ToIndexActionType(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "upload")) return IndexActionType.Upload; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "merge")) return IndexActionType.Merge; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "mergeOrUpload")) return IndexActionType.MergeOrUpload; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "delete")) return IndexActionType.Delete; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown IndexActionType value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexActionType.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexActionType.cs deleted file mode 100644 index 5cef5c1be6fa..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexActionType.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The operation to perform on a document in an indexing batch. - public enum IndexActionType - { - /// Inserts the document into the index if it is new and updates it if it exists. All fields are replaced in the update case. - Upload, - /// Merges the specified field values with an existing document. If the document does not exist, the merge will fail. Any field you specify in a merge will replace the existing field in the document. This also applies to collections of primitive and complex types. - Merge, - /// Behaves like merge if a document with the given key already exists in the index. If the document does not exist, it behaves like upload with a new document. - MergeOrUpload, - /// Removes the specified document from the index. Any field you specify in a delete operation other than the key field will be ignored. If you want to remove an individual field from a document, use merge instead and set the field explicitly to null. - Delete - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexBatch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexBatch.Serialization.cs deleted file mode 100644 index fa6a91e9f6ee..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexBatch.Serialization.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - internal partial class IndexBatch : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("value"u8); - writer.WriteStartArray(); - foreach (var item in Actions) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexBatch.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexBatch.cs deleted file mode 100644 index 8266fc227191..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexBatch.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Models -{ - /// Contains a batch of document write actions to send to the index. - internal partial class IndexBatch - { - /// Initializes a new instance of . - /// The actions in the batch. - /// is null. - public IndexBatch(IEnumerable actions) - { - Argument.AssertNotNull(actions, nameof(actions)); - - Actions = actions.ToList(); - } - - /// Initializes a new instance of . - /// The actions in the batch. - internal IndexBatch(IList actions) - { - Actions = actions; - } - - /// The actions in the batch. 
- public IList Actions { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexDocumentsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexDocumentsResult.Serialization.cs deleted file mode 100644 index 3adf877a7766..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexDocumentsResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class IndexDocumentsResult - { - internal static IndexDocumentsResult DeserializeIndexDocumentsResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(IndexingResult.DeserializeIndexingResult(item)); - } - value = array; - continue; - } - } - return new IndexDocumentsResult(value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static IndexDocumentsResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeIndexDocumentsResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexDocumentsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexDocumentsResult.cs deleted file mode 100644 index a8201d3039da..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexDocumentsResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Models -{ - /// Response containing the status of operations for all documents in the indexing request. - public partial class IndexDocumentsResult - { - /// Initializes a new instance of . - /// The list of status information for each document in the indexing request. - internal IndexDocumentsResult(IEnumerable results) - { - Results = results.ToList(); - } - - /// Initializes a new instance of . - /// The list of status information for each document in the indexing request. - internal IndexDocumentsResult(IReadOnlyList results) - { - Results = results; - } - - /// The list of status information for each document in the indexing request. - public IReadOnlyList Results { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionResult.Serialization.cs deleted file mode 100644 index 6cd00f2b438b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionResult.Serialization.cs +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
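For orientation, the removed IndexAction and IndexBatch types are the wire models behind the public IndexDocumentsBatch helpers, and IndexDocumentsResult is what SearchClient.IndexDocuments returns. A short sketch, inside an async method; the endpoint, key, index name, and document fields are illustrative.

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

var searchClient = new SearchClient(
    new Uri("https://<service>.search.windows.net"), "hotels-index", new AzureKeyCredential("<api-key>"));

// mergeOrUpload: update the document if the key exists, otherwise upload it as new.
IndexDocumentsBatch<SearchDocument> batch = IndexDocumentsBatch.MergeOrUpload(new[]
{
    new SearchDocument { ["HotelId"] = "1", ["HotelName"] = "Fancy Stay" },
    new SearchDocument { ["HotelId"] = "2", ["HotelName"] = "Roach Motel" }
});

Response<IndexDocumentsResult> response = await searchClient.IndexDocumentsAsync(batch);
foreach (IndexingResult docResult in response.Value.Results)
{
    if (!docResult.Succeeded)
    {
        Console.WriteLine($"{docResult.Key} failed with status {docResult.Status}: {docResult.ErrorMessage}");
    }
}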
- -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class IndexerExecutionResult - { - internal static IndexerExecutionResult DeserializeIndexerExecutionResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IndexerExecutionStatus status = default; - IndexerExecutionStatusDetail? statusDetail = default; - IndexerState currentState = default; - string errorMessage = default; - DateTimeOffset? startTime = default; - DateTimeOffset? endTime = default; - IReadOnlyList errors = default; - IReadOnlyList warnings = default; - int itemsProcessed = default; - int itemsFailed = default; - string initialTrackingState = default; - string finalTrackingState = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("status"u8)) - { - status = property.Value.GetString().ToIndexerExecutionStatus(); - continue; - } - if (property.NameEquals("statusDetail"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - statusDetail = null; - continue; - } - statusDetail = new IndexerExecutionStatusDetail(property.Value.GetString()); - continue; - } - if (property.NameEquals("currentState"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - currentState = IndexerState.DeserializeIndexerState(property.Value); - continue; - } - if (property.NameEquals("errorMessage"u8)) - { - errorMessage = property.Value.GetString(); - continue; - } - if (property.NameEquals("startTime"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - startTime = property.Value.GetDateTimeOffset("O"); - continue; - } - if (property.NameEquals("endTime"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - endTime = null; - continue; - } - endTime = property.Value.GetDateTimeOffset("O"); - continue; - } - if (property.NameEquals("errors"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndexerError.DeserializeSearchIndexerError(item)); - } - errors = array; - continue; - } - if (property.NameEquals("warnings"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndexerWarning.DeserializeSearchIndexerWarning(item)); - } - warnings = array; - continue; - } - if (property.NameEquals("itemsProcessed"u8)) - { - itemsProcessed = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("itemsFailed"u8)) - { - itemsFailed = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("initialTrackingState"u8)) - { - initialTrackingState = property.Value.GetString(); - continue; - } - if (property.NameEquals("finalTrackingState"u8)) - { - finalTrackingState = property.Value.GetString(); - continue; - } - } - return new IndexerExecutionResult( - status, - statusDetail, - currentState, - errorMessage, - startTime, - endTime, - errors, - warnings, - itemsProcessed, - itemsFailed, - initialTrackingState, - finalTrackingState); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static IndexerExecutionResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeIndexerExecutionResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionResult.cs deleted file mode 100644 index 16d5530e3ad3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionResult.cs +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents the result of an individual indexer execution. - public partial class IndexerExecutionResult - { - /// Initializes a new instance of . - /// The outcome of this indexer execution. - /// The item-level indexing errors. - /// The item-level indexing warnings. - /// The number of items that were processed during this indexer execution. This includes both successfully processed items and items where indexing was attempted but failed. - /// The number of items that failed to be indexed during this indexer execution. - internal IndexerExecutionResult(IndexerExecutionStatus status, IEnumerable errors, IEnumerable warnings, int itemCount, int failedItemCount) - { - Status = status; - Errors = errors.ToList(); - Warnings = warnings.ToList(); - ItemCount = itemCount; - FailedItemCount = failedItemCount; - } - - /// Initializes a new instance of . - /// The outcome of this indexer execution. - /// The outcome of this indexer execution. - /// All of the state that defines and dictates the indexer's current execution. - /// The error message indicating the top-level error, if any. - /// The start time of this indexer execution. - /// The end time of this indexer execution, if the execution has already completed. - /// The item-level indexing errors. - /// The item-level indexing warnings. - /// The number of items that were processed during this indexer execution. This includes both successfully processed items and items where indexing was attempted but failed. - /// The number of items that failed to be indexed during this indexer execution. - /// Change tracking state with which an indexer execution started. - /// Change tracking state with which an indexer execution finished. - internal IndexerExecutionResult(IndexerExecutionStatus status, IndexerExecutionStatusDetail? statusDetail, IndexerState currentState, string errorMessage, DateTimeOffset? startTime, DateTimeOffset? endTime, IReadOnlyList errors, IReadOnlyList warnings, int itemCount, int failedItemCount, string initialTrackingState, string finalTrackingState) - { - Status = status; - StatusDetail = statusDetail; - CurrentState = currentState; - ErrorMessage = errorMessage; - StartTime = startTime; - EndTime = endTime; - Errors = errors; - Warnings = warnings; - ItemCount = itemCount; - FailedItemCount = failedItemCount; - InitialTrackingState = initialTrackingState; - FinalTrackingState = finalTrackingState; - } - - /// The outcome of this indexer execution. - public IndexerExecutionStatus Status { get; } - /// The outcome of this indexer execution. - public IndexerExecutionStatusDetail? 
StatusDetail { get; }
-        /// All of the state that defines and dictates the indexer's current execution.
-        public IndexerState CurrentState { get; }
-        /// The error message indicating the top-level error, if any.
-        public string ErrorMessage { get; }
-        /// The start time of this indexer execution.
-        public DateTimeOffset? StartTime { get; }
-        /// The end time of this indexer execution, if the execution has already completed.
-        public DateTimeOffset? EndTime { get; }
-        /// The item-level indexing errors.
-        public IReadOnlyList<SearchIndexerError> Errors { get; }
-        /// The item-level indexing warnings.
-        public IReadOnlyList<SearchIndexerWarning> Warnings { get; }
-        /// The number of items that were processed during this indexer execution. This includes both successfully processed items and items where indexing was attempted but failed.
-        public int ItemCount { get; }
-        /// The number of items that failed to be indexed during this indexer execution.
-        public int FailedItemCount { get; }
-        /// Change tracking state with which an indexer execution started.
-        public string InitialTrackingState { get; }
-        /// Change tracking state with which an indexer execution finished.
-        public string FinalTrackingState { get; }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatus.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatus.Serialization.cs
deleted file mode 100644
index 307184571578..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatus.Serialization.cs
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System;
-
-namespace Azure.Search.Documents.Indexes.Models
-{
-    internal static partial class IndexerExecutionStatusExtensions
-    {
-        public static string ToSerialString(this IndexerExecutionStatus value) => value switch
-        {
-            IndexerExecutionStatus.TransientFailure => "transientFailure",
-            IndexerExecutionStatus.Success => "success",
-            IndexerExecutionStatus.InProgress => "inProgress",
-            IndexerExecutionStatus.Reset => "reset",
-            _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown IndexerExecutionStatus value.")
-        };
-
-        public static IndexerExecutionStatus ToIndexerExecutionStatus(this string value)
-        {
-            if (StringComparer.OrdinalIgnoreCase.Equals(value, "transientFailure")) return IndexerExecutionStatus.TransientFailure;
-            if (StringComparer.OrdinalIgnoreCase.Equals(value, "success")) return IndexerExecutionStatus.Success;
-            if (StringComparer.OrdinalIgnoreCase.Equals(value, "inProgress")) return IndexerExecutionStatus.InProgress;
-            if (StringComparer.OrdinalIgnoreCase.Equals(value, "reset")) return IndexerExecutionStatus.Reset;
-            throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown IndexerExecutionStatus value.");
-        }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatus.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatus.cs
deleted file mode 100644
index d6a9e3809133..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerExecutionStatus.cs
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-namespace Azure.Search.Documents.Indexes.Models
-{
-    /// Represents the status of an individual indexer execution.
- public enum IndexerExecutionStatus - { - /// An indexer invocation has failed, but the failure may be transient. Indexer invocations will continue per schedule. - TransientFailure, - /// Indexer execution completed successfully. - Success, - /// Indexer execution is in progress. - InProgress, - /// Indexer has been reset. - Reset - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerState.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerState.Serialization.cs deleted file mode 100644 index 52b14853ef52..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerState.Serialization.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class IndexerState - { - internal static IndexerState DeserializeIndexerState(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IndexingMode? mode = default; - string allDocsInitialChangeTrackingState = default; - string allDocsFinalChangeTrackingState = default; - string resetDocsInitialChangeTrackingState = default; - string resetDocsFinalChangeTrackingState = default; - IReadOnlyList resetDocumentKeys = default; - IReadOnlyList resetDatasourceDocumentIds = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("mode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - mode = new IndexingMode(property.Value.GetString()); - continue; - } - if (property.NameEquals("allDocsInitialChangeTrackingState"u8)) - { - allDocsInitialChangeTrackingState = property.Value.GetString(); - continue; - } - if (property.NameEquals("allDocsFinalChangeTrackingState"u8)) - { - allDocsFinalChangeTrackingState = property.Value.GetString(); - continue; - } - if (property.NameEquals("resetDocsInitialChangeTrackingState"u8)) - { - resetDocsInitialChangeTrackingState = property.Value.GetString(); - continue; - } - if (property.NameEquals("resetDocsFinalChangeTrackingState"u8)) - { - resetDocsFinalChangeTrackingState = property.Value.GetString(); - continue; - } - if (property.NameEquals("resetDocumentKeys"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - resetDocumentKeys = array; - continue; - } - if (property.NameEquals("resetDatasourceDocumentIds"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - resetDatasourceDocumentIds = array; - continue; - } - } - return new IndexerState( - mode, - allDocsInitialChangeTrackingState, - allDocsFinalChangeTrackingState, - resetDocsInitialChangeTrackingState, - resetDocsFinalChangeTrackingState, - resetDocumentKeys ?? new ChangeTrackingList(), - resetDatasourceDocumentIds ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
-        internal static IndexerState FromResponse(Response response)
-        {
-            using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions);
-            return DeserializeIndexerState(document.RootElement);
-        }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerState.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerState.cs
deleted file mode 100644
index 8047262f3e4f..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerState.cs
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System.Collections.Generic;
-
-namespace Azure.Search.Documents.Indexes.Models
-{
-    /// Represents all of the state that defines and dictates the indexer's current execution.
-    public partial class IndexerState
-    {
-        /// Initializes a new instance of <see cref="IndexerState"/>.
-        internal IndexerState()
-        {
-            ResetDocumentKeys = new ChangeTrackingList<string>();
-            ResetDataSourceDocumentIds = new ChangeTrackingList<string>();
-        }
-
-        /// The mode the indexer is running in.
-        public IndexingMode? Mode { get; }
-        /// The list of document keys that have been reset. The document key is the document's unique identifier for the data in the search index. The indexer will prioritize selectively re-ingesting these keys.
-        public IReadOnlyList<string> ResetDocumentKeys { get; }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerStatus.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerStatus.Serialization.cs
deleted file mode 100644
index 816a545302b9..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerStatus.Serialization.cs
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System;
-
-namespace Azure.Search.Documents.Indexes.Models
-{
-    internal static partial class IndexerStatusExtensions
-    {
-        public static string ToSerialString(this IndexerStatus value) => value switch
-        {
-            IndexerStatus.Unknown => "unknown",
-            IndexerStatus.Error => "error",
-            IndexerStatus.Running => "running",
-            _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown IndexerStatus value.")
-        };
-
-        public static IndexerStatus ToIndexerStatus(this string value)
-        {
-            if (StringComparer.OrdinalIgnoreCase.Equals(value, "unknown")) return IndexerStatus.Unknown;
-            if (StringComparer.OrdinalIgnoreCase.Equals(value, "error")) return IndexerStatus.Error;
-            if (StringComparer.OrdinalIgnoreCase.Equals(value, "running")) return IndexerStatus.Running;
-            throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown IndexerStatus value.");
-        }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerStatus.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerStatus.cs
deleted file mode 100644
index df688e276e30..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexerStatus.cs
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-namespace Azure.Search.Documents.Indexes.Models
-{
-    /// Represents the overall indexer status.
-    public enum IndexerStatus
-    {
-        /// Indicates that the indexer is in an unknown state.
-        Unknown,
-        /// Indicates that the indexer experienced an error that cannot be corrected without human intervention.
-        Error,
-        /// Indicates that the indexer is running normally.
-        Running
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingMode.cs
deleted file mode 100644
index d06c75c206b1..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingMode.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System;
-using System.ComponentModel;
-
-namespace Azure.Search.Documents.Indexes.Models
-{
-    /// Represents the mode the indexer is executing in.
-    public readonly partial struct IndexingMode : IEquatable<IndexingMode>
-    {
-        private readonly string _value;
-
-        /// Initializes a new instance of <see cref="IndexingMode"/>.
-        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
-        public IndexingMode(string value)
-        {
-            _value = value ?? throw new ArgumentNullException(nameof(value));
-        }
-
-        private const string AllDocumentsValue = "indexingAllDocs";
-        private const string ResetDocumentsValue = "indexingResetDocs";
-        /// Determines if two <see cref="IndexingMode"/> values are the same.
-        public static bool operator ==(IndexingMode left, IndexingMode right) => left.Equals(right);
-        /// Determines if two <see cref="IndexingMode"/> values are not the same.
-        public static bool operator !=(IndexingMode left, IndexingMode right) => !left.Equals(right);
-        /// Converts a <see cref="string"/> to a <see cref="IndexingMode"/>.
-        public static implicit operator IndexingMode(string value) => new IndexingMode(value);
-
-        /// <inheritdoc/>
-        [EditorBrowsable(EditorBrowsableState.Never)]
-        public override bool Equals(object obj) => obj is IndexingMode other && Equals(other);
-        /// <inheritdoc/>
-        public bool Equals(IndexingMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);
-
-        /// <inheritdoc/>
-        [EditorBrowsable(EditorBrowsableState.Never)]
-        public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0;
-        /// <inheritdoc/>
-        public override string ToString() => _value;
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParameters.Serialization.cs
deleted file mode 100644
index 9f0398068bb5..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParameters.Serialization.cs
+++ /dev/null
@@ -1,133 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class IndexingParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(BatchSize)) - { - if (BatchSize != null) - { - writer.WritePropertyName("batchSize"u8); - writer.WriteNumberValue(BatchSize.Value); - } - else - { - writer.WriteNull("batchSize"); - } - } - if (Optional.IsDefined(MaxFailedItems)) - { - if (MaxFailedItems != null) - { - writer.WritePropertyName("maxFailedItems"u8); - writer.WriteNumberValue(MaxFailedItems.Value); - } - else - { - writer.WriteNull("maxFailedItems"); - } - } - if (Optional.IsDefined(MaxFailedItemsPerBatch)) - { - if (MaxFailedItemsPerBatch != null) - { - writer.WritePropertyName("maxFailedItemsPerBatch"u8); - writer.WriteNumberValue(MaxFailedItemsPerBatch.Value); - } - else - { - writer.WriteNull("maxFailedItemsPerBatch"); - } - } - if (Optional.IsDefined(IndexingParametersConfiguration)) - { - writer.WritePropertyName("configuration"u8); - writer.WriteObjectValue(IndexingParametersConfiguration); - } - writer.WriteEndObject(); - } - - internal static IndexingParameters DeserializeIndexingParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? batchSize = default; - int? maxFailedItems = default; - int? maxFailedItemsPerBatch = default; - IndexingParametersConfiguration configuration = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("batchSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - batchSize = null; - continue; - } - batchSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxFailedItems"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxFailedItems = null; - continue; - } - maxFailedItems = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxFailedItemsPerBatch"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxFailedItemsPerBatch = null; - continue; - } - maxFailedItemsPerBatch = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("configuration"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - configuration = Models.IndexingParametersConfiguration.DeserializeIndexingParametersConfiguration(property.Value); - continue; - } - } - return new IndexingParameters(batchSize, maxFailedItems, maxFailedItemsPerBatch, configuration); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static IndexingParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeIndexingParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParameters.cs deleted file mode 100644 index fab204e67262..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParameters.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents parameters for indexer execution. - public partial class IndexingParameters - { - /// Initializes a new instance of . - /// The number of items that are read from the data source and indexed as a single batch in order to improve performance. The default depends on the data source type. - /// The maximum number of items that can fail indexing for indexer execution to still be considered successful. -1 means no limit. Default is 0. - /// The maximum number of items in a single batch that can fail indexing for the batch to still be considered successful. -1 means no limit. Default is 0. - /// A dictionary of indexer-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. - internal IndexingParameters(int? batchSize, int? maxFailedItems, int? maxFailedItemsPerBatch, IndexingParametersConfiguration indexingParametersConfiguration) - { - BatchSize = batchSize; - MaxFailedItems = maxFailedItems; - MaxFailedItemsPerBatch = maxFailedItemsPerBatch; - IndexingParametersConfiguration = indexingParametersConfiguration; - } - - /// The number of items that are read from the data source and indexed as a single batch in order to improve performance. The default depends on the data source type. - public int? BatchSize { get; set; } - /// The maximum number of items that can fail indexing for indexer execution to still be considered successful. -1 means no limit. Default is 0. - public int? MaxFailedItems { get; set; } - /// The maximum number of items in a single batch that can fail indexing for the batch to still be considered successful. -1 means no limit. Default is 0. - public int? MaxFailedItemsPerBatch { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParametersConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParametersConfiguration.Serialization.cs deleted file mode 100644 index aed6a2d77bef..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParametersConfiguration.Serialization.cs +++ /dev/null @@ -1,340 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class IndexingParametersConfiguration : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ParsingMode)) - { - writer.WritePropertyName("parsingMode"u8); - writer.WriteStringValue(ParsingMode.Value.ToString()); - } - if (Optional.IsDefined(ExcludedFileNameExtensions)) - { - writer.WritePropertyName("excludedFileNameExtensions"u8); - writer.WriteStringValue(ExcludedFileNameExtensions); - } - if (Optional.IsDefined(IndexedFileNameExtensions)) - { - writer.WritePropertyName("indexedFileNameExtensions"u8); - writer.WriteStringValue(IndexedFileNameExtensions); - } - if (Optional.IsDefined(FailOnUnsupportedContentType)) - { - writer.WritePropertyName("failOnUnsupportedContentType"u8); - writer.WriteBooleanValue(FailOnUnsupportedContentType.Value); - } - if (Optional.IsDefined(FailOnUnprocessableDocument)) - { - writer.WritePropertyName("failOnUnprocessableDocument"u8); - writer.WriteBooleanValue(FailOnUnprocessableDocument.Value); - } - if (Optional.IsDefined(IndexStorageMetadataOnlyForOversizedDocuments)) - { - writer.WritePropertyName("indexStorageMetadataOnlyForOversizedDocuments"u8); - writer.WriteBooleanValue(IndexStorageMetadataOnlyForOversizedDocuments.Value); - } - if (Optional.IsDefined(DelimitedTextHeaders)) - { - writer.WritePropertyName("delimitedTextHeaders"u8); - writer.WriteStringValue(DelimitedTextHeaders); - } - if (Optional.IsDefined(DelimitedTextDelimiter)) - { - writer.WritePropertyName("delimitedTextDelimiter"u8); - writer.WriteStringValue(DelimitedTextDelimiter); - } - if (Optional.IsDefined(FirstLineContainsHeaders)) - { - writer.WritePropertyName("firstLineContainsHeaders"u8); - writer.WriteBooleanValue(FirstLineContainsHeaders.Value); - } - if (Optional.IsDefined(MarkdownParsingSubmode)) - { - if (MarkdownParsingSubmode != null) - { - writer.WritePropertyName("markdownParsingSubmode"u8); - writer.WriteStringValue(MarkdownParsingSubmode.Value.ToString()); - } - else - { - writer.WriteNull("markdownParsingSubmode"); - } - } - if (Optional.IsDefined(MarkdownHeaderDepth)) - { - if (MarkdownHeaderDepth != null) - { - writer.WritePropertyName("markdownHeaderDepth"u8); - writer.WriteStringValue(MarkdownHeaderDepth.Value.ToString()); - } - else - { - writer.WriteNull("markdownHeaderDepth"); - } - } - if (Optional.IsDefined(DocumentRoot)) - { - writer.WritePropertyName("documentRoot"u8); - writer.WriteStringValue(DocumentRoot); - } - if (Optional.IsDefined(DataToExtract)) - { - writer.WritePropertyName("dataToExtract"u8); - writer.WriteStringValue(DataToExtract.Value.ToString()); - } - if (Optional.IsDefined(ImageAction)) - { - writer.WritePropertyName("imageAction"u8); - writer.WriteStringValue(ImageAction.Value.ToString()); - } - if (Optional.IsDefined(AllowSkillsetToReadFileData)) - { - writer.WritePropertyName("allowSkillsetToReadFileData"u8); - writer.WriteBooleanValue(AllowSkillsetToReadFileData.Value); - } - if (Optional.IsDefined(PdfTextRotationAlgorithm)) - { - writer.WritePropertyName("pdfTextRotationAlgorithm"u8); - writer.WriteStringValue(PdfTextRotationAlgorithm.Value.ToString()); - } - if (Optional.IsDefined(ExecutionEnvironment)) - { - writer.WritePropertyName("executionEnvironment"u8); - writer.WriteStringValue(ExecutionEnvironment.Value.ToString()); - } - if (Optional.IsDefined(_queryTimeout)) - { 
- writer.WritePropertyName("queryTimeout"u8); - writer.WriteStringValue(_queryTimeout); - } - foreach (var item in AdditionalProperties) - { - writer.WritePropertyName(item.Key); - writer.WriteObjectValue(item.Value); - } - writer.WriteEndObject(); - } - - internal static IndexingParametersConfiguration DeserializeIndexingParametersConfiguration(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - BlobIndexerParsingMode? parsingMode = default; - string excludedFileNameExtensions = default; - string indexedFileNameExtensions = default; - bool? failOnUnsupportedContentType = default; - bool? failOnUnprocessableDocument = default; - bool? indexStorageMetadataOnlyForOversizedDocuments = default; - string delimitedTextHeaders = default; - string delimitedTextDelimiter = default; - bool? firstLineContainsHeaders = default; - MarkdownParsingSubmode? markdownParsingSubmode = default; - MarkdownHeaderDepth? markdownHeaderDepth = default; - string documentRoot = default; - BlobIndexerDataToExtract? dataToExtract = default; - BlobIndexerImageAction? imageAction = default; - bool? allowSkillsetToReadFileData = default; - BlobIndexerPdfTextRotationAlgorithm? pdfTextRotationAlgorithm = default; - IndexerExecutionEnvironment? executionEnvironment = default; - string queryTimeout = default; - IDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("parsingMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - parsingMode = new BlobIndexerParsingMode(property.Value.GetString()); - continue; - } - if (property.NameEquals("excludedFileNameExtensions"u8)) - { - excludedFileNameExtensions = property.Value.GetString(); - continue; - } - if (property.NameEquals("indexedFileNameExtensions"u8)) - { - indexedFileNameExtensions = property.Value.GetString(); - continue; - } - if (property.NameEquals("failOnUnsupportedContentType"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - failOnUnsupportedContentType = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("failOnUnprocessableDocument"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - failOnUnprocessableDocument = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("indexStorageMetadataOnlyForOversizedDocuments"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - indexStorageMetadataOnlyForOversizedDocuments = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("delimitedTextHeaders"u8)) - { - delimitedTextHeaders = property.Value.GetString(); - continue; - } - if (property.NameEquals("delimitedTextDelimiter"u8)) - { - delimitedTextDelimiter = property.Value.GetString(); - continue; - } - if (property.NameEquals("firstLineContainsHeaders"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - firstLineContainsHeaders = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("markdownParsingSubmode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - markdownParsingSubmode = null; - continue; - } - markdownParsingSubmode = new MarkdownParsingSubmode(property.Value.GetString()); - continue; - } - if (property.NameEquals("markdownHeaderDepth"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - markdownHeaderDepth = null; - 
continue; - } - markdownHeaderDepth = new MarkdownHeaderDepth(property.Value.GetString()); - continue; - } - if (property.NameEquals("documentRoot"u8)) - { - documentRoot = property.Value.GetString(); - continue; - } - if (property.NameEquals("dataToExtract"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - dataToExtract = new BlobIndexerDataToExtract(property.Value.GetString()); - continue; - } - if (property.NameEquals("imageAction"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - imageAction = new BlobIndexerImageAction(property.Value.GetString()); - continue; - } - if (property.NameEquals("allowSkillsetToReadFileData"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - allowSkillsetToReadFileData = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("pdfTextRotationAlgorithm"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - pdfTextRotationAlgorithm = new BlobIndexerPdfTextRotationAlgorithm(property.Value.GetString()); - continue; - } - if (property.NameEquals("executionEnvironment"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - executionEnvironment = new IndexerExecutionEnvironment(property.Value.GetString()); - continue; - } - if (property.NameEquals("queryTimeout"u8)) - { - queryTimeout = property.Value.GetString(); - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new IndexingParametersConfiguration( - parsingMode, - excludedFileNameExtensions, - indexedFileNameExtensions, - failOnUnsupportedContentType, - failOnUnprocessableDocument, - indexStorageMetadataOnlyForOversizedDocuments, - delimitedTextHeaders, - delimitedTextDelimiter, - firstLineContainsHeaders, - markdownParsingSubmode, - markdownHeaderDepth, - documentRoot, - dataToExtract, - imageAction, - allowSkillsetToReadFileData, - pdfTextRotationAlgorithm, - executionEnvironment, - queryTimeout, - additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static IndexingParametersConfiguration FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeIndexingParametersConfiguration(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParametersConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParametersConfiguration.cs deleted file mode 100644 index 65198da470e3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingParametersConfiguration.cs +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A dictionary of indexer-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. - public partial class IndexingParametersConfiguration - { - /// Initializes a new instance of . 
- public IndexingParametersConfiguration() - { - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// Represents the parsing mode for indexing from an Azure blob data source. - /// Comma-delimited list of filename extensions to ignore when processing from Azure blob storage. For example, you could exclude ".png, .mp4" to skip over those files during indexing. - /// Comma-delimited list of filename extensions to select when processing from Azure blob storage. For example, you could focus indexing on specific application files ".docx, .pptx, .msg" to specifically include those file types. - /// For Azure blobs, set to false if you want to continue indexing when an unsupported content type is encountered, and you don't know all the content types (file extensions) in advance. - /// For Azure blobs, set to false if you want to continue indexing if a document fails indexing. - /// For Azure blobs, set this property to true to still index storage metadata for blob content that is too large to process. Oversized blobs are treated as errors by default. For limits on blob size, see https://learn.microsoft.com/azure/search/search-limits-quotas-capacity. - /// For CSV blobs, specifies a comma-delimited list of column headers, useful for mapping source fields to destination fields in an index. - /// For CSV blobs, specifies the end-of-line single-character delimiter for CSV files where each line starts a new document (for example, "|"). - /// For CSV blobs, indicates that the first (non-blank) line of each blob contains headers. - /// Specifies the submode that will determine whether a markdown file will be parsed into exactly one search document or multiple search documents. Default is `oneToMany`. - /// Specifies the max header depth that will be considered while grouping markdown content. Default is `h6`. - /// For JSON arrays, given a structured or semi-structured document, you can specify a path to the array using this property. - /// Specifies the data to extract from Azure blob storage and tells the indexer which data to extract from image content when "imageAction" is set to a value other than "none". This applies to embedded image content in a .PDF or other application, or image files such as .jpg and .png, in Azure blobs. - /// Determines how to process embedded images and image files in Azure blob storage. Setting the "imageAction" configuration to any value other than "none" requires that a skillset also be attached to that indexer. - /// If true, will create a path //document//file_data that is an object representing the original file data downloaded from your blob data source. This allows you to pass the original file data to a custom skill for processing within the enrichment pipeline, or to the Document Extraction skill. - /// Determines algorithm for text extraction from PDF files in Azure blob storage. - /// Specifies the environment in which the indexer should execute. - /// Increases the timeout beyond the 5-minute default for Azure SQL database data sources, specified in the format "hh:mm:ss". - /// Additional Properties. - internal IndexingParametersConfiguration(BlobIndexerParsingMode? parsingMode, string excludedFileNameExtensions, string indexedFileNameExtensions, bool? failOnUnsupportedContentType, bool? failOnUnprocessableDocument, bool? indexStorageMetadataOnlyForOversizedDocuments, string delimitedTextHeaders, string delimitedTextDelimiter, bool? firstLineContainsHeaders, MarkdownParsingSubmode? 
markdownParsingSubmode, MarkdownHeaderDepth? markdownHeaderDepth, string documentRoot, BlobIndexerDataToExtract? dataToExtract, BlobIndexerImageAction? imageAction, bool? allowSkillsetToReadFileData, BlobIndexerPdfTextRotationAlgorithm? pdfTextRotationAlgorithm, IndexerExecutionEnvironment? executionEnvironment, string queryTimeout, IDictionary additionalProperties) - { - ParsingMode = parsingMode; - ExcludedFileNameExtensions = excludedFileNameExtensions; - IndexedFileNameExtensions = indexedFileNameExtensions; - FailOnUnsupportedContentType = failOnUnsupportedContentType; - FailOnUnprocessableDocument = failOnUnprocessableDocument; - IndexStorageMetadataOnlyForOversizedDocuments = indexStorageMetadataOnlyForOversizedDocuments; - DelimitedTextHeaders = delimitedTextHeaders; - DelimitedTextDelimiter = delimitedTextDelimiter; - FirstLineContainsHeaders = firstLineContainsHeaders; - MarkdownParsingSubmode = markdownParsingSubmode; - MarkdownHeaderDepth = markdownHeaderDepth; - DocumentRoot = documentRoot; - DataToExtract = dataToExtract; - ImageAction = imageAction; - AllowSkillsetToReadFileData = allowSkillsetToReadFileData; - PdfTextRotationAlgorithm = pdfTextRotationAlgorithm; - ExecutionEnvironment = executionEnvironment; - _queryTimeout = queryTimeout; - AdditionalProperties = additionalProperties; - } - - /// Represents the parsing mode for indexing from an Azure blob data source. - public BlobIndexerParsingMode? ParsingMode { get; set; } - /// Comma-delimited list of filename extensions to ignore when processing from Azure blob storage. For example, you could exclude ".png, .mp4" to skip over those files during indexing. - public string ExcludedFileNameExtensions { get; set; } - /// Comma-delimited list of filename extensions to select when processing from Azure blob storage. For example, you could focus indexing on specific application files ".docx, .pptx, .msg" to specifically include those file types. - public string IndexedFileNameExtensions { get; set; } - /// For Azure blobs, set to false if you want to continue indexing when an unsupported content type is encountered, and you don't know all the content types (file extensions) in advance. - public bool? FailOnUnsupportedContentType { get; set; } - /// For Azure blobs, set to false if you want to continue indexing if a document fails indexing. - public bool? FailOnUnprocessableDocument { get; set; } - /// For Azure blobs, set this property to true to still index storage metadata for blob content that is too large to process. Oversized blobs are treated as errors by default. For limits on blob size, see https://learn.microsoft.com/azure/search/search-limits-quotas-capacity. - public bool? IndexStorageMetadataOnlyForOversizedDocuments { get; set; } - /// For CSV blobs, specifies a comma-delimited list of column headers, useful for mapping source fields to destination fields in an index. - public string DelimitedTextHeaders { get; set; } - /// For CSV blobs, specifies the end-of-line single-character delimiter for CSV files where each line starts a new document (for example, "|"). - public string DelimitedTextDelimiter { get; set; } - /// For CSV blobs, indicates that the first (non-blank) line of each blob contains headers. - public bool? FirstLineContainsHeaders { get; set; } - /// Specifies the submode that will determine whether a markdown file will be parsed into exactly one search document or multiple search documents. Default is `oneToMany`. - public MarkdownParsingSubmode? 
MarkdownParsingSubmode { get; set; } - /// Specifies the max header depth that will be considered while grouping markdown content. Default is `h6`. - public MarkdownHeaderDepth? MarkdownHeaderDepth { get; set; } - /// For JSON arrays, given a structured or semi-structured document, you can specify a path to the array using this property. - public string DocumentRoot { get; set; } - /// Specifies the data to extract from Azure blob storage and tells the indexer which data to extract from image content when "imageAction" is set to a value other than "none". This applies to embedded image content in a .PDF or other application, or image files such as .jpg and .png, in Azure blobs. - public BlobIndexerDataToExtract? DataToExtract { get; set; } - /// Determines how to process embedded images and image files in Azure blob storage. Setting the "imageAction" configuration to any value other than "none" requires that a skillset also be attached to that indexer. - public BlobIndexerImageAction? ImageAction { get; set; } - /// If true, will create a path //document//file_data that is an object representing the original file data downloaded from your blob data source. This allows you to pass the original file data to a custom skill for processing within the enrichment pipeline, or to the Document Extraction skill. - public bool? AllowSkillsetToReadFileData { get; set; } - /// Determines algorithm for text extraction from PDF files in Azure blob storage. - public BlobIndexerPdfTextRotationAlgorithm? PdfTextRotationAlgorithm { get; set; } - /// Specifies the environment in which the indexer should execute. - public IndexerExecutionEnvironment? ExecutionEnvironment { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingResult.Serialization.cs deleted file mode 100644 index 877ddf72a4ba..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingResult.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class IndexingResult - { - internal static IndexingResult DeserializeIndexingResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string key = default; - string errorMessage = default; - bool status = default; - int statusCode = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("key"u8)) - { - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("errorMessage"u8)) - { - errorMessage = property.Value.GetString(); - continue; - } - if (property.NameEquals("status"u8)) - { - status = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("statusCode"u8)) - { - statusCode = property.Value.GetInt32(); - continue; - } - } - return new IndexingResult(key, errorMessage, status, statusCode); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
-        internal static IndexingResult FromResponse(Response response)
-        {
-            using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions);
-            return DeserializeIndexingResult(document.RootElement);
-        }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingResult.cs
deleted file mode 100644
index bf84934bc27c..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingResult.cs
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-namespace Azure.Search.Documents.Models
-{
-    /// Status of an indexing operation for a single document.
-    public partial class IndexingResult
-    {
-        /// Initializes a new instance of <see cref="IndexingResult"/>.
-        /// The key of a document that was in the indexing request.
-        /// A value indicating whether the indexing operation succeeded for the document identified by the key.
-        /// The status code of the indexing operation. Possible values include: 200 for a successful update or delete, 201 for successful document creation, 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy.
-        internal IndexingResult(string key, bool succeeded, int status)
-        {
-            Key = key;
-            Succeeded = succeeded;
-            Status = status;
-        }
-
-        /// Initializes a new instance of <see cref="IndexingResult"/>.
-        /// The key of a document that was in the indexing request.
-        /// The error message explaining why the indexing operation failed for the document identified by the key; null if indexing succeeded.
-        /// A value indicating whether the indexing operation succeeded for the document identified by the key.
-        /// The status code of the indexing operation. Possible values include: 200 for a successful update or delete, 201 for successful document creation, 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy.
-        internal IndexingResult(string key, string errorMessage, bool succeeded, int status)
-        {
-            Key = key;
-            ErrorMessage = errorMessage;
-            Succeeded = succeeded;
-            Status = status;
-        }
-
-        /// The key of a document that was in the indexing request.
-        public string Key { get; }
-        /// The error message explaining why the indexing operation failed for the document identified by the key; null if indexing succeeded.
-        public string ErrorMessage { get; }
-        /// A value indicating whether the indexing operation succeeded for the document identified by the key.
-        public bool Succeeded { get; }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingSchedule.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingSchedule.Serialization.cs
deleted file mode 100644
index b34e062b1cca..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingSchedule.Serialization.cs
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
- -// - -#nullable disable - -using System; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class IndexingSchedule : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("interval"u8); - writer.WriteStringValue(Interval, "P"); - if (Optional.IsDefined(StartTime)) - { - writer.WritePropertyName("startTime"u8); - writer.WriteStringValue(StartTime.Value, "O"); - } - writer.WriteEndObject(); - } - - internal static IndexingSchedule DeserializeIndexingSchedule(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - TimeSpan interval = default; - DateTimeOffset? startTime = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("interval"u8)) - { - interval = property.Value.GetTimeSpan("P"); - continue; - } - if (property.NameEquals("startTime"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - startTime = property.Value.GetDateTimeOffset("O"); - continue; - } - } - return new IndexingSchedule(interval, startTime); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static IndexingSchedule FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeIndexingSchedule(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingSchedule.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingSchedule.cs deleted file mode 100644 index 186014638b39..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/IndexingSchedule.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents a schedule for indexer execution. - public partial class IndexingSchedule - { - /// Initializes a new instance of . - /// The interval of time between indexer executions. - public IndexingSchedule(TimeSpan interval) - { - Interval = interval; - } - - /// Initializes a new instance of . - /// The interval of time between indexer executions. - /// The time when an indexer should start running. - internal IndexingSchedule(TimeSpan interval, DateTimeOffset? startTime) - { - Interval = interval; - StartTime = startTime; - } - - /// The interval of time between indexer executions. - public TimeSpan Interval { get; set; } - /// The time when an indexer should start running. - public DateTimeOffset? StartTime { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/InputFieldMappingEntry.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/InputFieldMappingEntry.Serialization.cs deleted file mode 100644 index 0ec8ec09983f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/InputFieldMappingEntry.Serialization.cs +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class InputFieldMappingEntry : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(Source)) - { - writer.WritePropertyName("source"u8); - writer.WriteStringValue(Source); - } - if (Optional.IsDefined(SourceContext)) - { - writer.WritePropertyName("sourceContext"u8); - writer.WriteStringValue(SourceContext); - } - if (Optional.IsCollectionDefined(Inputs)) - { - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static InputFieldMappingEntry DeserializeInputFieldMappingEntry(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string source = default; - string sourceContext = default; - IList inputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("source"u8)) - { - source = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceContext"u8)) - { - sourceContext = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - } - return new InputFieldMappingEntry(name, source, sourceContext, inputs ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static InputFieldMappingEntry FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeInputFieldMappingEntry(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/InputFieldMappingEntry.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/InputFieldMappingEntry.cs deleted file mode 100644 index 763b79ac9e92..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/InputFieldMappingEntry.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Input field mapping for a skill. - public partial class InputFieldMappingEntry - { - /// Initializes a new instance of . - /// The name of the input. - /// is null. - public InputFieldMappingEntry(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - Inputs = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The name of the input. - /// The source of the input. 
- /// The source context used for selecting recursive inputs. - /// The recursive inputs used when creating a complex type. - internal InputFieldMappingEntry(string name, string source, string sourceContext, IList inputs) - { - Name = name; - Source = source; - SourceContext = sourceContext; - Inputs = inputs; - } - - /// The name of the input. - public string Name { get; set; } - /// The source of the input. - public string Source { get; set; } - /// The source context used for selecting recursive inputs. - public string SourceContext { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeepTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeepTokenFilter.Serialization.cs deleted file mode 100644 index fc931f7e5541..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeepTokenFilter.Serialization.cs +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KeepTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("keepWords"u8); - writer.WriteStartArray(); - foreach (var item in KeepWords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(LowerCaseKeepWords)) - { - writer.WritePropertyName("keepWordsCase"u8); - writer.WriteBooleanValue(LowerCaseKeepWords.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static KeepTokenFilter DeserializeKeepTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList keepWords = default; - bool? keepWordsCase = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("keepWords"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - keepWords = array; - continue; - } - if (property.NameEquals("keepWordsCase"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - keepWordsCase = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new KeepTokenFilter(odataType, name, keepWords, keepWordsCase); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new KeepTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKeepTokenFilter(document.RootElement); - } - - /// Convert into a . 
-        internal override RequestContent ToRequestContent()
-        {
-            var content = new Utf8JsonRequestContent();
-            content.JsonWriter.WriteObjectValue(this);
-            return content;
-        }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeepTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeepTokenFilter.cs
deleted file mode 100644
index a4429a1ea475..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeepTokenFilter.cs
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-// <auto-generated/>
-
-#nullable disable
-
-using System;
-using System.Collections.Generic;
-using System.Linq;
-
-namespace Azure.Search.Documents.Indexes.Models
-{
-    /// A token filter that only keeps tokens with text contained in a specified list of words. This token filter is implemented using Apache Lucene.
-    public partial class KeepTokenFilter : TokenFilter
-    {
-        /// Initializes a new instance of <see cref="KeepTokenFilter"/>.
-        /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters.
-        /// The list of words to keep.
-        /// <exception cref="ArgumentNullException"> <paramref name="name"/> or <paramref name="keepWords"/> is null. </exception>
-        public KeepTokenFilter(string name, IEnumerable<string> keepWords) : base(name)
-        {
-            Argument.AssertNotNull(name, nameof(name));
-            Argument.AssertNotNull(keepWords, nameof(keepWords));
-
-            KeepWords = keepWords.ToList();
-            ODataType = "#Microsoft.Azure.Search.KeepTokenFilter";
-        }
-
-        /// Initializes a new instance of <see cref="KeepTokenFilter"/>.
-        /// A URI fragment specifying the type of token filter.
-        /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters.
-        /// The list of words to keep.
-        /// A value indicating whether to lower case all words first. Default is false.
-        internal KeepTokenFilter(string oDataType, string name, IList<string> keepWords, bool? lowerCaseKeepWords) : base(oDataType, name)
-        {
-            KeepWords = keepWords;
-            LowerCaseKeepWords = lowerCaseKeepWords;
-            ODataType = oDataType ?? "#Microsoft.Azure.Search.KeepTokenFilter";
-        }
-        /// A value indicating whether to lower case all words first. Default is false.
-        public bool? LowerCaseKeepWords { get; set; }
-    }
-}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkill.Serialization.cs
deleted file mode 100644
index 91ea38df0309..000000000000
--- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkill.Serialization.cs
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KeyPhraseExtractionSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(MaxKeyPhraseCount)) - { - if (MaxKeyPhraseCount != null) - { - writer.WritePropertyName("maxKeyPhraseCount"u8); - writer.WriteNumberValue(MaxKeyPhraseCount.Value); - } - else - { - writer.WriteNull("maxKeyPhraseCount"); - } - } - if (Optional.IsDefined(ModelVersion)) - { - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static KeyPhraseExtractionSkill DeserializeKeyPhraseExtractionSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - KeyPhraseExtractionSkillLanguage? defaultLanguageCode = default; - int? 
maxKeyPhraseCount = default; - string modelVersion = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = new KeyPhraseExtractionSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("maxKeyPhraseCount"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxKeyPhraseCount = null; - continue; - } - maxKeyPhraseCount = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new KeyPhraseExtractionSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - maxKeyPhraseCount, - modelVersion); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new KeyPhraseExtractionSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKeyPhraseExtractionSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkill.cs deleted file mode 100644 index e99f474f8e93..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeyPhraseExtractionSkill.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that uses text analytics for key phrase extraction. - public partial class KeyPhraseExtractionSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. 
- /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public KeyPhraseExtractionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Text.KeyPhraseExtractionSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// A number indicating how many key phrases to return. If absent, all identified key phrases will be returned. - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - internal KeyPhraseExtractionSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, KeyPhraseExtractionSkillLanguage? defaultLanguageCode, int? maxKeyPhraseCount, string modelVersion) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - MaxKeyPhraseCount = maxKeyPhraseCount; - ModelVersion = modelVersion; - ODataType = oDataType ?? "#Microsoft.Skills.Text.KeyPhraseExtractionSkill"; - } - - /// A value indicating which language code to use. Default is `en`. - public KeyPhraseExtractionSkillLanguage? DefaultLanguageCode { get; set; } - /// A number indicating how many key phrases to return. If absent, all identified key phrases will be returned. - public int? MaxKeyPhraseCount { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordMarkerTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordMarkerTokenFilter.Serialization.cs deleted file mode 100644 index 2c1ac7aace23..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordMarkerTokenFilter.Serialization.cs +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
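KeyPhraseExtractionSkill, whose model and serializer are deleted above, is normally attached to a skillset through the standard input/output mappings. A rough sketch, with the skillset name and field paths as assumptions:

using Azure.Search.Documents.Indexes.Models;

// Feed /document/content to the skill and project the detected key phrases back out.
var keyPhraseSkill = new KeyPhraseExtractionSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("keyPhrases") { TargetName = "keyPhrases" } })
{
    DefaultLanguageCode = KeyPhraseExtractionSkillLanguage.En,
    MaxKeyPhraseCount = 10
};

// "demo-skillset" is a hypothetical name.
var skillset = new SearchIndexerSkillset("demo-skillset", new SearchIndexerSkill[] { keyPhraseSkill });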
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KeywordMarkerTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("keywords"u8); - writer.WriteStartArray(); - foreach (var item in Keywords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(IgnoreCase)) - { - writer.WritePropertyName("ignoreCase"u8); - writer.WriteBooleanValue(IgnoreCase.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static KeywordMarkerTokenFilter DeserializeKeywordMarkerTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList keywords = default; - bool? ignoreCase = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("keywords"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - keywords = array; - continue; - } - if (property.NameEquals("ignoreCase"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - ignoreCase = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new KeywordMarkerTokenFilter(odataType, name, keywords, ignoreCase); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new KeywordMarkerTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKeywordMarkerTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordMarkerTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordMarkerTokenFilter.cs deleted file mode 100644 index 5ed7f96e3de2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordMarkerTokenFilter.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Marks terms as keywords. This token filter is implemented using Apache Lucene. - public partial class KeywordMarkerTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of words to mark as keywords. - /// or is null. 
- public KeywordMarkerTokenFilter(string name, IEnumerable keywords) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(keywords, nameof(keywords)); - - Keywords = keywords.ToList(); - ODataType = "#Microsoft.Azure.Search.KeywordMarkerTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of words to mark as keywords. - /// A value indicating whether to ignore case. If true, all words are converted to lower case first. Default is false. - internal KeywordMarkerTokenFilter(string oDataType, string name, IList keywords, bool? ignoreCase) : base(oDataType, name) - { - Keywords = keywords; - IgnoreCase = ignoreCase; - ODataType = oDataType ?? "#Microsoft.Azure.Search.KeywordMarkerTokenFilter"; - } - /// A value indicating whether to ignore case. If true, all words are converted to lower case first. Default is false. - public bool? IgnoreCase { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordTokenizer.Serialization.cs deleted file mode 100644 index 390f73702826..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordTokenizer.Serialization.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KeywordTokenizer : IUtf8JsonSerializable - { - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new KeywordTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKeywordTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordTokenizer.cs deleted file mode 100644 index a7c9f087b376..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KeywordTokenizer.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Emits the entire input as a single token. This tokenizer is implemented using Apache Lucene. - public partial class KeywordTokenizer : LexicalTokenizer - { - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStore.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStore.Serialization.cs deleted file mode 100644 index 5b4c7ba81149..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStore.Serialization.cs +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
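KeywordMarkerTokenFilter, removed just above, exists mainly to protect specific terms from downstream stemming. A small, illustrative construction (the index and filter names are assumptions):

using Azure.Search.Documents.Indexes.Models;

// Mark brand terms as keywords so a later stemming filter leaves them unchanged.
// "brand-marker" is a hypothetical filter name.
var index = new SearchIndex("products-sample");
index.TokenFilters.Add(new KeywordMarkerTokenFilter("brand-marker", new[] { "Contoso", "Fabrikam" })
{
    IgnoreCase = true // match the keyword list regardless of casing
});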
-// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KnowledgeStore : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("storageConnectionString"u8); - writer.WriteStringValue(StorageConnectionString); - writer.WritePropertyName("projections"u8); - writer.WriteStartArray(); - foreach (var item in Projections) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(Identity)) - { - if (Identity != null) - { - writer.WritePropertyName("identity"u8); - writer.WriteObjectValue(Identity); - } - else - { - writer.WriteNull("identity"); - } - } - if (Optional.IsDefined(Parameters)) - { - writer.WritePropertyName("parameters"u8); - writer.WriteObjectValue(Parameters); - } - writer.WriteEndObject(); - } - - internal static KnowledgeStore DeserializeKnowledgeStore(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string storageConnectionString = default; - IList projections = default; - SearchIndexerDataIdentity identity = default; - SearchIndexerKnowledgeStoreParameters parameters = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("storageConnectionString"u8)) - { - storageConnectionString = property.Value.GetString(); - continue; - } - if (property.NameEquals("projections"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(KnowledgeStoreProjection.DeserializeKnowledgeStoreProjection(item)); - } - projections = array; - continue; - } - if (property.NameEquals("identity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - identity = null; - continue; - } - identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - if (property.NameEquals("parameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - parameters = SearchIndexerKnowledgeStoreParameters.DeserializeSearchIndexerKnowledgeStoreParameters(property.Value); - continue; - } - } - return new KnowledgeStore(storageConnectionString, projections, identity, parameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static KnowledgeStore FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKnowledgeStore(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStore.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStore.cs deleted file mode 100644 index 64d90d0c5341..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStore.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Definition of additional projections to azure blob, table, or files, of enriched data. - public partial class KnowledgeStore - { - /// Initializes a new instance of . - /// The connection string to the storage account projections will be stored in. - /// A list of additional projections to perform during indexing. - /// or is null. - public KnowledgeStore(string storageConnectionString, IEnumerable projections) - { - Argument.AssertNotNull(storageConnectionString, nameof(storageConnectionString)); - Argument.AssertNotNull(projections, nameof(projections)); - - StorageConnectionString = storageConnectionString; - Projections = projections.ToList(); - } - - /// Initializes a new instance of . - /// The connection string to the storage account projections will be stored in. - /// A list of additional projections to perform during indexing. - /// - /// The user-assigned managed identity used for connections to Azure Storage when writing knowledge store projections. If the connection string indicates an identity (ResourceId) and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// A dictionary of knowledge store-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. - internal KnowledgeStore(string storageConnectionString, IList projections, SearchIndexerDataIdentity identity, SearchIndexerKnowledgeStoreParameters parameters) - { - StorageConnectionString = storageConnectionString; - Projections = projections; - Identity = identity; - Parameters = parameters; - } - - /// The connection string to the storage account projections will be stored in. - public string StorageConnectionString { get; set; } - /// A list of additional projections to perform during indexing. - public IList Projections { get; } - /// - /// The user-assigned managed identity used for connections to Azure Storage when writing knowledge store projections. If the connection string indicates an identity (ResourceId) and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity Identity { get; set; } - /// A dictionary of knowledge store-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. 
- public SearchIndexerKnowledgeStoreParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreFileProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreFileProjectionSelector.Serialization.cs deleted file mode 100644 index 2d241a3680b5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreFileProjectionSelector.Serialization.cs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KnowledgeStoreFileProjectionSelector : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("storageContainer"u8); - writer.WriteStringValue(StorageContainer); - if (Optional.IsDefined(ReferenceKeyName)) - { - writer.WritePropertyName("referenceKeyName"u8); - writer.WriteStringValue(ReferenceKeyName); - } - if (Optional.IsDefined(GeneratedKeyName)) - { - writer.WritePropertyName("generatedKeyName"u8); - writer.WriteStringValue(GeneratedKeyName); - } - if (Optional.IsDefined(Source)) - { - writer.WritePropertyName("source"u8); - writer.WriteStringValue(Source); - } - if (Optional.IsDefined(SourceContext)) - { - writer.WritePropertyName("sourceContext"u8); - writer.WriteStringValue(SourceContext); - } - if (Optional.IsCollectionDefined(Inputs)) - { - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static KnowledgeStoreFileProjectionSelector DeserializeKnowledgeStoreFileProjectionSelector(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string storageContainer = default; - string referenceKeyName = default; - string generatedKeyName = default; - string source = default; - string sourceContext = default; - IList inputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("storageContainer"u8)) - { - storageContainer = property.Value.GetString(); - continue; - } - if (property.NameEquals("referenceKeyName"u8)) - { - referenceKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("generatedKeyName"u8)) - { - generatedKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("source"u8)) - { - source = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceContext"u8)) - { - sourceContext = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - } - return new KnowledgeStoreFileProjectionSelector( - referenceKeyName, - generatedKeyName, - source, - sourceContext, - inputs ?? new ChangeTrackingList(), - storageContainer); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static new KnowledgeStoreFileProjectionSelector FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKnowledgeStoreFileProjectionSelector(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreFileProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreFileProjectionSelector.cs deleted file mode 100644 index e0b91c0f9339..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreFileProjectionSelector.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Projection definition for what data to store in Azure Files. - public partial class KnowledgeStoreFileProjectionSelector : KnowledgeStoreStorageProjectionSelector - { - /// Initializes a new instance of . - /// Blob container to store projections in. - /// is null. - public KnowledgeStoreFileProjectionSelector(string storageContainer) : base(storageContainer) - { - Argument.AssertNotNull(storageContainer, nameof(storageContainer)); - } - - /// Initializes a new instance of . - /// Name of reference key to different projection. - /// Name of generated key to store projection under. - /// Source data to project. - /// Source context for complex projections. - /// Nested inputs for complex projections. - /// Blob container to store projections in. - internal KnowledgeStoreFileProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, string storageContainer) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs, storageContainer) - { - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreObjectProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreObjectProjectionSelector.Serialization.cs deleted file mode 100644 index c47998060921..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreObjectProjectionSelector.Serialization.cs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KnowledgeStoreObjectProjectionSelector : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("storageContainer"u8); - writer.WriteStringValue(StorageContainer); - if (Optional.IsDefined(ReferenceKeyName)) - { - writer.WritePropertyName("referenceKeyName"u8); - writer.WriteStringValue(ReferenceKeyName); - } - if (Optional.IsDefined(GeneratedKeyName)) - { - writer.WritePropertyName("generatedKeyName"u8); - writer.WriteStringValue(GeneratedKeyName); - } - if (Optional.IsDefined(Source)) - { - writer.WritePropertyName("source"u8); - writer.WriteStringValue(Source); - } - if (Optional.IsDefined(SourceContext)) - { - writer.WritePropertyName("sourceContext"u8); - writer.WriteStringValue(SourceContext); - } - if (Optional.IsCollectionDefined(Inputs)) - { - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static KnowledgeStoreObjectProjectionSelector DeserializeKnowledgeStoreObjectProjectionSelector(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string storageContainer = default; - string referenceKeyName = default; - string generatedKeyName = default; - string source = default; - string sourceContext = default; - IList inputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("storageContainer"u8)) - { - storageContainer = property.Value.GetString(); - continue; - } - if (property.NameEquals("referenceKeyName"u8)) - { - referenceKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("generatedKeyName"u8)) - { - generatedKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("source"u8)) - { - source = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceContext"u8)) - { - sourceContext = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - } - return new KnowledgeStoreObjectProjectionSelector( - referenceKeyName, - generatedKeyName, - source, - sourceContext, - inputs ?? new ChangeTrackingList(), - storageContainer); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new KnowledgeStoreObjectProjectionSelector FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKnowledgeStoreObjectProjectionSelector(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreObjectProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreObjectProjectionSelector.cs deleted file mode 100644 index 323d29c1e4ee..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreObjectProjectionSelector.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Projection definition for what data to store in Azure Blob. - public partial class KnowledgeStoreObjectProjectionSelector : KnowledgeStoreStorageProjectionSelector - { - /// Initializes a new instance of . - /// Blob container to store projections in. - /// is null. - public KnowledgeStoreObjectProjectionSelector(string storageContainer) : base(storageContainer) - { - Argument.AssertNotNull(storageContainer, nameof(storageContainer)); - } - - /// Initializes a new instance of . - /// Name of reference key to different projection. - /// Name of generated key to store projection under. - /// Source data to project. - /// Source context for complex projections. - /// Nested inputs for complex projections. - /// Blob container to store projections in. - internal KnowledgeStoreObjectProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, string storageContainer) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs, storageContainer) - { - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjection.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjection.Serialization.cs deleted file mode 100644 index 27b6e653a6b6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjection.Serialization.cs +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KnowledgeStoreProjection : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(Tables)) - { - writer.WritePropertyName("tables"u8); - writer.WriteStartArray(); - foreach (var item in Tables) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Objects)) - { - writer.WritePropertyName("objects"u8); - writer.WriteStartArray(); - foreach (var item in Objects) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Files)) - { - writer.WritePropertyName("files"u8); - writer.WriteStartArray(); - foreach (var item in Files) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static KnowledgeStoreProjection DeserializeKnowledgeStoreProjection(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList tables = default; - IList objects = default; - IList files = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("tables"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(KnowledgeStoreTableProjectionSelector.DeserializeKnowledgeStoreTableProjectionSelector(item)); - } - tables = array; - continue; - } - if (property.NameEquals("objects"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(KnowledgeStoreObjectProjectionSelector.DeserializeKnowledgeStoreObjectProjectionSelector(item)); - } - objects = array; - continue; - } - if (property.NameEquals("files"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(KnowledgeStoreFileProjectionSelector.DeserializeKnowledgeStoreFileProjectionSelector(item)); - } - files = array; - continue; - } - } - return new KnowledgeStoreProjection(tables ?? new ChangeTrackingList(), objects ?? new ChangeTrackingList(), files ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static KnowledgeStoreProjection FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKnowledgeStoreProjection(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjection.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjection.cs deleted file mode 100644 index f8b9bc2da7cf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjection.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Container object for various projection selectors. - public partial class KnowledgeStoreProjection - { - /// Initializes a new instance of . - public KnowledgeStoreProjection() - { - Tables = new ChangeTrackingList(); - Objects = new ChangeTrackingList(); - Files = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// Projections to Azure Table storage. - /// Projections to Azure Blob storage. - /// Projections to Azure File storage. - internal KnowledgeStoreProjection(IList tables, IList objects, IList files) - { - Tables = tables; - Objects = objects; - Files = files; - } - - /// Projections to Azure Table storage. - public IList Tables { get; } - /// Projections to Azure Blob storage. - public IList Objects { get; } - /// Projections to Azure File storage. - public IList Files { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjectionSelector.Serialization.cs deleted file mode 100644 index c623670b2466..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjectionSelector.Serialization.cs +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KnowledgeStoreProjectionSelector : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ReferenceKeyName)) - { - writer.WritePropertyName("referenceKeyName"u8); - writer.WriteStringValue(ReferenceKeyName); - } - if (Optional.IsDefined(GeneratedKeyName)) - { - writer.WritePropertyName("generatedKeyName"u8); - writer.WriteStringValue(GeneratedKeyName); - } - if (Optional.IsDefined(Source)) - { - writer.WritePropertyName("source"u8); - writer.WriteStringValue(Source); - } - if (Optional.IsDefined(SourceContext)) - { - writer.WritePropertyName("sourceContext"u8); - writer.WriteStringValue(SourceContext); - } - if (Optional.IsCollectionDefined(Inputs)) - { - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static KnowledgeStoreProjectionSelector DeserializeKnowledgeStoreProjectionSelector(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string referenceKeyName = default; - string generatedKeyName = default; - string source = default; - string sourceContext = default; - IList inputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("referenceKeyName"u8)) - { - referenceKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("generatedKeyName"u8)) - { - generatedKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("source"u8)) - { - source = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceContext"u8)) - { - sourceContext = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - if (property.Value.ValueKind == 
JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - } - return new KnowledgeStoreProjectionSelector(referenceKeyName, generatedKeyName, source, sourceContext, inputs ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static KnowledgeStoreProjectionSelector FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKnowledgeStoreProjectionSelector(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjectionSelector.cs deleted file mode 100644 index d514104a5151..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreProjectionSelector.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Abstract class to share properties between concrete selectors. - public partial class KnowledgeStoreProjectionSelector - { - /// Initializes a new instance of . - public KnowledgeStoreProjectionSelector() - { - Inputs = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// Name of reference key to different projection. - /// Name of generated key to store projection under. - /// Source data to project. - /// Source context for complex projections. - /// Nested inputs for complex projections. - internal KnowledgeStoreProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs) - { - ReferenceKeyName = referenceKeyName; - GeneratedKeyName = generatedKeyName; - Source = source; - SourceContext = sourceContext; - Inputs = inputs; - } - - /// Name of reference key to different projection. - public string ReferenceKeyName { get; set; } - /// Name of generated key to store projection under. - public string GeneratedKeyName { get; set; } - /// Source data to project. - public string Source { get; set; } - /// Source context for complex projections. - public string SourceContext { get; set; } - /// Nested inputs for complex projections. - public IList Inputs { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreStorageProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreStorageProjectionSelector.Serialization.cs deleted file mode 100644 index e53e1fcd25ad..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreStorageProjectionSelector.Serialization.cs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KnowledgeStoreStorageProjectionSelector : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("storageContainer"u8); - writer.WriteStringValue(StorageContainer); - if (Optional.IsDefined(ReferenceKeyName)) - { - writer.WritePropertyName("referenceKeyName"u8); - writer.WriteStringValue(ReferenceKeyName); - } - if (Optional.IsDefined(GeneratedKeyName)) - { - writer.WritePropertyName("generatedKeyName"u8); - writer.WriteStringValue(GeneratedKeyName); - } - if (Optional.IsDefined(Source)) - { - writer.WritePropertyName("source"u8); - writer.WriteStringValue(Source); - } - if (Optional.IsDefined(SourceContext)) - { - writer.WritePropertyName("sourceContext"u8); - writer.WriteStringValue(SourceContext); - } - if (Optional.IsCollectionDefined(Inputs)) - { - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static KnowledgeStoreStorageProjectionSelector DeserializeKnowledgeStoreStorageProjectionSelector(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string storageContainer = default; - string referenceKeyName = default; - string generatedKeyName = default; - string source = default; - string sourceContext = default; - IList inputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("storageContainer"u8)) - { - storageContainer = property.Value.GetString(); - continue; - } - if (property.NameEquals("referenceKeyName"u8)) - { - referenceKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("generatedKeyName"u8)) - { - generatedKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("source"u8)) - { - source = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceContext"u8)) - { - sourceContext = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - } - return new KnowledgeStoreStorageProjectionSelector( - referenceKeyName, - generatedKeyName, - source, - sourceContext, - inputs ?? new ChangeTrackingList(), - storageContainer); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new KnowledgeStoreStorageProjectionSelector FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKnowledgeStoreStorageProjectionSelector(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreStorageProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreStorageProjectionSelector.cs deleted file mode 100644 index 34f7a292a26a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreStorageProjectionSelector.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Abstract class to share properties between concrete selectors. - public partial class KnowledgeStoreStorageProjectionSelector : KnowledgeStoreProjectionSelector - { - /// Initializes a new instance of . - /// Blob container to store projections in. - /// is null. - public KnowledgeStoreStorageProjectionSelector(string storageContainer) - { - Argument.AssertNotNull(storageContainer, nameof(storageContainer)); - - StorageContainer = storageContainer; - } - - /// Initializes a new instance of . - /// Name of reference key to different projection. - /// Name of generated key to store projection under. - /// Source data to project. - /// Source context for complex projections. - /// Nested inputs for complex projections. - /// Blob container to store projections in. - internal KnowledgeStoreStorageProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, string storageContainer) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs) - { - StorageContainer = storageContainer; - } - - /// Blob container to store projections in. - public string StorageContainer { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreTableProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreTableProjectionSelector.Serialization.cs deleted file mode 100644 index bd2e0078a964..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreTableProjectionSelector.Serialization.cs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class KnowledgeStoreTableProjectionSelector : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("tableName"u8); - writer.WriteStringValue(TableName); - if (Optional.IsDefined(ReferenceKeyName)) - { - writer.WritePropertyName("referenceKeyName"u8); - writer.WriteStringValue(ReferenceKeyName); - } - if (Optional.IsDefined(GeneratedKeyName)) - { - writer.WritePropertyName("generatedKeyName"u8); - writer.WriteStringValue(GeneratedKeyName); - } - if (Optional.IsDefined(Source)) - { - writer.WritePropertyName("source"u8); - writer.WriteStringValue(Source); - } - if (Optional.IsDefined(SourceContext)) - { - writer.WritePropertyName("sourceContext"u8); - writer.WriteStringValue(SourceContext); - } - if (Optional.IsCollectionDefined(Inputs)) - { - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static KnowledgeStoreTableProjectionSelector DeserializeKnowledgeStoreTableProjectionSelector(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string tableName = default; - string referenceKeyName = default; - string generatedKeyName = default; - string source = default; - string sourceContext = default; - IList inputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("tableName"u8)) - { - tableName = property.Value.GetString(); - continue; - } - if (property.NameEquals("referenceKeyName"u8)) - { - referenceKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("generatedKeyName"u8)) - { - generatedKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("source"u8)) - { - source = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceContext"u8)) - { - sourceContext = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - } - return new KnowledgeStoreTableProjectionSelector( - referenceKeyName, - generatedKeyName, - source, - sourceContext, - inputs ?? new ChangeTrackingList(), - tableName); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new KnowledgeStoreTableProjectionSelector FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeKnowledgeStoreTableProjectionSelector(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreTableProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreTableProjectionSelector.cs deleted file mode 100644 index 58f6e6341a66..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/KnowledgeStoreTableProjectionSelector.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Description for what data to store in Azure Tables. - public partial class KnowledgeStoreTableProjectionSelector : KnowledgeStoreProjectionSelector - { - /// Initializes a new instance of . - /// Name of the Azure table to store projected data in. - /// is null. - public KnowledgeStoreTableProjectionSelector(string tableName) - { - Argument.AssertNotNull(tableName, nameof(tableName)); - - TableName = tableName; - } - - /// Initializes a new instance of . - /// Name of reference key to different projection. - /// Name of generated key to store projection under. - /// Source data to project. - /// Source context for complex projections. - /// Nested inputs for complex projections. - /// Name of the Azure table to store projected data in. - internal KnowledgeStoreTableProjectionSelector(string referenceKeyName, string generatedKeyName, string source, string sourceContext, IList inputs, string tableName) : base(referenceKeyName, generatedKeyName, source, sourceContext, inputs) - { - TableName = tableName; - } - - /// Name of the Azure table to store projected data in. - public string TableName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LanguageDetectionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LanguageDetectionSkill.Serialization.cs deleted file mode 100644 index 855d6aa2fd89..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LanguageDetectionSkill.Serialization.cs +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
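The knowledge store models deleted in this stretch (KnowledgeStore, KnowledgeStoreProjection, and the table, object, and file projection selectors) compose as shown below. The connection string, container, table name, and source paths are placeholders, and the shipped package surfaces some of these types under customized names, so treat this as a sketch of the shape rather than the exact public API.

using Azure.Search.Documents.Indexes.Models;

// One projection group: enriched content goes to an Azure table and to blob objects.
var projection = new KnowledgeStoreProjection();
projection.Tables.Add(new KnowledgeStoreTableProjectionSelector("hotelEnrichments")
{
    GeneratedKeyName = "hotelId",
    Source = "/document/tableprojection"
});
projection.Objects.Add(new KnowledgeStoreObjectProjectionSelector("hotel-objects")
{
    Source = "/document/objectprojection"
});

// The storage connection string points at the account that backs the knowledge store;
// the resulting object is then assigned to the skillset's knowledge store property.
var knowledgeStore = new KnowledgeStore("<storage-connection-string>", new[] { projection });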
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LanguageDetectionSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultCountryHint)) - { - if (DefaultCountryHint != null) - { - writer.WritePropertyName("defaultCountryHint"u8); - writer.WriteStringValue(DefaultCountryHint); - } - else - { - writer.WriteNull("defaultCountryHint"); - } - } - if (Optional.IsDefined(ModelVersion)) - { - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static LanguageDetectionSkill DeserializeLanguageDetectionSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string defaultCountryHint = default; - string modelVersion = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultCountryHint"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultCountryHint = null; - continue; - } - defaultCountryHint = property.Value.GetString(); - continue; - } - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new LanguageDetectionSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultCountryHint, - 
modelVersion); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new LanguageDetectionSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLanguageDetectionSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LanguageDetectionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LanguageDetectionSkill.cs deleted file mode 100644 index 12ffc9f170fa..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LanguageDetectionSkill.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that detects the language of input text and reports a single language code for every document submitted on the request. The language code is paired with a score indicating the confidence of the analysis. - public partial class LanguageDetectionSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public LanguageDetectionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Text.LanguageDetectionSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A country code to use as a hint to the language detection model if it cannot disambiguate the language. - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - internal LanguageDetectionSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, string defaultCountryHint, string modelVersion) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultCountryHint = defaultCountryHint; - ModelVersion = modelVersion; - ODataType = oDataType ?? 
"#Microsoft.Skills.Text.LanguageDetectionSkill"; - } - - /// A country code to use as a hint to the language detection model if it cannot disambiguate the language. - public string DefaultCountryHint { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LengthTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LengthTokenFilter.Serialization.cs deleted file mode 100644 index 164b39d92bea..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LengthTokenFilter.Serialization.cs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LengthTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MinLength)) - { - writer.WritePropertyName("min"u8); - writer.WriteNumberValue(MinLength.Value); - } - if (Optional.IsDefined(MaxLength)) - { - writer.WritePropertyName("max"u8); - writer.WriteNumberValue(MaxLength.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static LengthTokenFilter DeserializeLengthTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? min = default; - int? max = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("min"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - min = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("max"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - max = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new LengthTokenFilter(odataType, name, min, max); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new LengthTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLengthTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LengthTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LengthTokenFilter.cs deleted file mode 100644 index 0cc11a9aef0c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LengthTokenFilter.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Removes words that are too long or too short. This token filter is implemented using Apache Lucene. 
- public partial class LengthTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public LengthTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.LengthTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The minimum length in characters. Default is 0. Maximum is 300. Must be less than the value of max. - /// The maximum length in characters. Default and maximum is 300. - internal LengthTokenFilter(string oDataType, string name, int? minLength, int? maxLength) : base(oDataType, name) - { - MinLength = minLength; - MaxLength = maxLength; - ODataType = oDataType ?? "#Microsoft.Azure.Search.LengthTokenFilter"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzer.Serialization.cs deleted file mode 100644 index e2701deaac0d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzer.Serialization.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LexicalAnalyzer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static LexicalAnalyzer DeserializeLexicalAnalyzer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.CustomAnalyzer": return CustomAnalyzer.DeserializeCustomAnalyzer(element); - case "#Microsoft.Azure.Search.PatternAnalyzer": return PatternAnalyzer.DeserializePatternAnalyzer(element); - case "#Microsoft.Azure.Search.StandardAnalyzer": return LuceneStandardAnalyzer.DeserializeLuceneStandardAnalyzer(element); - case "#Microsoft.Azure.Search.StopAnalyzer": return StopAnalyzer.DeserializeStopAnalyzer(element); - } - } - return UnknownLexicalAnalyzer.DeserializeUnknownLexicalAnalyzer(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static LexicalAnalyzer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLexicalAnalyzer(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzer.cs deleted file mode 100644 index 53cf65dad465..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalAnalyzer.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for analyzers. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - public partial class LexicalAnalyzer - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of analyzer. - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal LexicalAnalyzer(string oDataType, string name) - { - ODataType = oDataType; - Name = name; - } - - /// A URI fragment specifying the type of analyzer. - internal string ODataType { get; set; } - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - public string Name { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizer.Serialization.cs deleted file mode 100644 index 19893b348aef..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizer.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LexicalNormalizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static LexicalNormalizer DeserializeLexicalNormalizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.CustomNormalizer": return CustomNormalizer.DeserializeCustomNormalizer(element); - } - } - return UnknownLexicalNormalizer.DeserializeUnknownLexicalNormalizer(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static LexicalNormalizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLexicalNormalizer(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizer.cs deleted file mode 100644 index 4f5638a8dc94..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizer.cs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for normalizers. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include . - /// - public partial class LexicalNormalizer - { - /// Initializes a new instance of . - /// The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. It cannot end in '.microsoft' nor '.lucene', nor be named 'asciifolding', 'standard', 'lowercase', 'uppercase', or 'elision'. - /// is null. - public LexicalNormalizer(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of normalizer. - /// The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. It cannot end in '.microsoft' nor '.lucene', nor be named 'asciifolding', 'standard', 'lowercase', 'uppercase', or 'elision'. - internal LexicalNormalizer(string oDataType, string name) - { - ODataType = oDataType; - Name = name; - } - - /// A URI fragment specifying the type of normalizer. - internal string ODataType { get; set; } - /// The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. It cannot end in '.microsoft' nor '.lucene', nor be named 'asciifolding', 'standard', 'lowercase', 'uppercase', or 'elision'. - public string Name { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizerName.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizerName.cs deleted file mode 100644 index 3e67080960aa..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalNormalizerName.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines the names of all text normalizers supported by the search engine. 
- public readonly partial struct LexicalNormalizerName : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public LexicalNormalizerName(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string AsciiFoldingValue = "asciifolding"; - private const string ElisionValue = "elision"; - private const string LowercaseValue = "lowercase"; - private const string StandardValue = "standard"; - private const string UppercaseValue = "uppercase"; - - /// Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html. - public static LexicalNormalizerName AsciiFolding { get; } = new LexicalNormalizerName(AsciiFoldingValue); - /// Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html. - public static LexicalNormalizerName Elision { get; } = new LexicalNormalizerName(ElisionValue); - /// Normalizes token text to lowercase. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html. - public static LexicalNormalizerName Lowercase { get; } = new LexicalNormalizerName(LowercaseValue); - /// Standard normalizer, which consists of lowercase and asciifolding. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html. - public static LexicalNormalizerName Standard { get; } = new LexicalNormalizerName(StandardValue); - /// Normalizes token text to uppercase. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html. - public static LexicalNormalizerName Uppercase { get; } = new LexicalNormalizerName(UppercaseValue); - /// Determines if two values are the same. - public static bool operator ==(LexicalNormalizerName left, LexicalNormalizerName right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(LexicalNormalizerName left, LexicalNormalizerName right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator LexicalNormalizerName(string value) => new LexicalNormalizerName(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is LexicalNormalizerName other && Equals(other); - /// - public bool Equals(LexicalNormalizerName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizer.Serialization.cs deleted file mode 100644 index 308657a0bedf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizer.Serialization.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
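For reference, a minimal usage sketch of the extensible-enum pattern that LexicalNormalizerName (just above) and the other *Name structs in this diff follow. It assumes a project referencing the Azure.Search.Documents package; the namespace matches the deleted file and may differ after regeneration, and the custom value is hypothetical.

using System;
using Azure.Search.Documents.Indexes.Models;

// Known values are static properties; arbitrary strings convert implicitly and
// compare case-insensitively against the underlying value.
LexicalNormalizerName known = LexicalNormalizerName.Lowercase;
LexicalNormalizerName fromString = "LOWERCASE";        // implicit operator from string
Console.WriteLine(known == fromString);                // True (InvariantCultureIgnoreCase)

LexicalNormalizerName custom = "my-custom-normalizer"; // hypothetical value; unknown strings still round-trip
Console.WriteLine(custom.ToString());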
-// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LexicalTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static LexicalTokenizer DeserializeLexicalTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.ClassicTokenizer": return ClassicTokenizer.DeserializeClassicTokenizer(element); - case "#Microsoft.Azure.Search.EdgeNGramTokenizer": return EdgeNGramTokenizer.DeserializeEdgeNGramTokenizer(element); - case "#Microsoft.Azure.Search.KeywordTokenizer": return KeywordTokenizer.DeserializeKeywordTokenizer(element); - case "#Microsoft.Azure.Search.KeywordTokenizerV2": return KeywordTokenizer.DeserializeKeywordTokenizer(element); - case "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer": return MicrosoftLanguageStemmingTokenizer.DeserializeMicrosoftLanguageStemmingTokenizer(element); - case "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer": return MicrosoftLanguageTokenizer.DeserializeMicrosoftLanguageTokenizer(element); - case "#Microsoft.Azure.Search.NGramTokenizer": return NGramTokenizer.DeserializeNGramTokenizer(element); - case "#Microsoft.Azure.Search.PathHierarchyTokenizerV2": return PathHierarchyTokenizer.DeserializePathHierarchyTokenizer(element); - case "#Microsoft.Azure.Search.PatternTokenizer": return PatternTokenizer.DeserializePatternTokenizer(element); - case "#Microsoft.Azure.Search.StandardTokenizer": return LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(element); - case "#Microsoft.Azure.Search.StandardTokenizerV2": return LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(element); - case "#Microsoft.Azure.Search.UaxUrlEmailTokenizer": return UaxUrlEmailTokenizer.DeserializeUaxUrlEmailTokenizer(element); - } - } - return UnknownLexicalTokenizer.DeserializeUnknownLexicalTokenizer(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static LexicalTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLexicalTokenizer(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizer.cs deleted file mode 100644 index d5e4a6c6413c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizer.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for tokenizers. 
- /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , and . - /// - public partial class LexicalTokenizer - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal LexicalTokenizer(string oDataType, string name) - { - ODataType = oDataType; - Name = name; - } - - /// A URI fragment specifying the type of tokenizer. - internal string ODataType { get; set; } - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - public string Name { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizerName.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizerName.cs deleted file mode 100644 index a573b2161d36..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LexicalTokenizerName.cs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines the names of all tokenizers supported by the search engine. - public readonly partial struct LexicalTokenizerName : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public LexicalTokenizerName(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string ClassicValue = "classic"; - private const string EdgeNGramValue = "edgeNGram"; - private const string KeywordValue = "keyword_v2"; - private const string LetterValue = "letter"; - private const string LowercaseValue = "lowercase"; - private const string MicrosoftLanguageTokenizerValue = "microsoft_language_tokenizer"; - private const string MicrosoftLanguageStemmingTokenizerValue = "microsoft_language_stemming_tokenizer"; - private const string NGramValue = "nGram"; - private const string PathHierarchyValue = "path_hierarchy_v2"; - private const string PatternValue = "pattern"; - private const string StandardValue = "standard_v2"; - private const string UaxUrlEmailValue = "uax_url_email"; - private const string WhitespaceValue = "whitespace"; - - /// Grammar-based tokenizer that is suitable for processing most European-language documents. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicTokenizer.html. - public static LexicalTokenizerName Classic { get; } = new LexicalTokenizerName(ClassicValue); - /// Tokenizes the input from an edge into n-grams of the given size(s). See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenizer.html. - public static LexicalTokenizerName EdgeNGram { get; } = new LexicalTokenizerName(EdgeNGramValue); - /// Emits the entire input as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordTokenizer.html. 
- public static LexicalTokenizerName Keyword { get; } = new LexicalTokenizerName(KeywordValue); - /// Divides text at non-letters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LetterTokenizer.html. - public static LexicalTokenizerName Letter { get; } = new LexicalTokenizerName(LetterValue); - /// Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LowerCaseTokenizer.html. - public static LexicalTokenizerName Lowercase { get; } = new LexicalTokenizerName(LowercaseValue); - /// Divides text using language-specific rules. - public static LexicalTokenizerName MicrosoftLanguageTokenizer { get; } = new LexicalTokenizerName(MicrosoftLanguageTokenizerValue); - /// Divides text using language-specific rules and reduces words to their base forms. - public static LexicalTokenizerName MicrosoftLanguageStemmingTokenizer { get; } = new LexicalTokenizerName(MicrosoftLanguageStemmingTokenizerValue); - /// Tokenizes the input into n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenizer.html. - public static LexicalTokenizerName NGram { get; } = new LexicalTokenizerName(NGramValue); - /// Tokenizer for path-like hierarchies. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/path/PathHierarchyTokenizer.html. - public static LexicalTokenizerName PathHierarchy { get; } = new LexicalTokenizerName(PathHierarchyValue); - /// Tokenizer that uses regex pattern matching to construct distinct tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/pattern/PatternTokenizer.html. - public static LexicalTokenizerName Pattern { get; } = new LexicalTokenizerName(PatternValue); - /// Standard Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/StandardTokenizer.html. - public static LexicalTokenizerName Standard { get; } = new LexicalTokenizerName(StandardValue); - /// Tokenizes urls and emails as one token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.html. - public static LexicalTokenizerName UaxUrlEmail { get; } = new LexicalTokenizerName(UaxUrlEmailValue); - /// Divides text at whitespace. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceTokenizer.html. - public static LexicalTokenizerName Whitespace { get; } = new LexicalTokenizerName(WhitespaceValue); - /// Determines if two values are the same. - public static bool operator ==(LexicalTokenizerName left, LexicalTokenizerName right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(LexicalTokenizerName left, LexicalTokenizerName right) => !left.Equals(right); - /// Converts a to a . 
- public static implicit operator LexicalTokenizerName(string value) => new LexicalTokenizerName(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is LexicalTokenizerName other && Equals(other); - /// - public bool Equals(LexicalTokenizerName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LimitTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LimitTokenFilter.Serialization.cs deleted file mode 100644 index b4df7bcd37eb..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LimitTokenFilter.Serialization.cs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LimitTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxTokenCount)) - { - writer.WritePropertyName("maxTokenCount"u8); - writer.WriteNumberValue(MaxTokenCount.Value); - } - if (Optional.IsDefined(ConsumeAllTokens)) - { - writer.WritePropertyName("consumeAllTokens"u8); - writer.WriteBooleanValue(ConsumeAllTokens.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static LimitTokenFilter DeserializeLimitTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxTokenCount = default; - bool? consumeAllTokens = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxTokenCount"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTokenCount = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("consumeAllTokens"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - consumeAllTokens = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new LimitTokenFilter(odataType, name, maxTokenCount, consumeAllTokens); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new LimitTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLimitTokenFilter(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LimitTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LimitTokenFilter.cs deleted file mode 100644 index 0e5fc63519f6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LimitTokenFilter.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Limits the number of tokens while indexing. This token filter is implemented using Apache Lucene. - public partial class LimitTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public LimitTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.LimitTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The maximum number of tokens to produce. Default is 1. - /// A value indicating whether all tokens from the input must be consumed even if maxTokenCount is reached. Default is false. - internal LimitTokenFilter(string oDataType, string name, int? maxTokenCount, bool? consumeAllTokens) : base(oDataType, name) - { - MaxTokenCount = maxTokenCount; - ConsumeAllTokens = consumeAllTokens; - ODataType = oDataType ?? "#Microsoft.Azure.Search.LimitTokenFilter"; - } - - /// The maximum number of tokens to produce. Default is 1. - public int? MaxTokenCount { get; set; } - /// A value indicating whether all tokens from the input must be consumed even if maxTokenCount is reached. Default is false. - public bool? ConsumeAllTokens { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListAliasesResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListAliasesResult.Serialization.cs deleted file mode 100644 index a9eb35a542ba..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListAliasesResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class ListAliasesResult - { - internal static ListAliasesResult DeserializeListAliasesResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchAlias.DeserializeSearchAlias(item)); - } - value = array; - continue; - } - } - return new ListAliasesResult(value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ListAliasesResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeListAliasesResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListAliasesResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListAliasesResult.cs deleted file mode 100644 index fa7098882a44..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListAliasesResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Response from a List Aliases request. If successful, it includes the associated index mappings for all aliases. - internal partial class ListAliasesResult - { - /// Initializes a new instance of . - /// The aliases in the Search service. - internal ListAliasesResult(IEnumerable aliases) - { - Aliases = aliases.ToList(); - } - - /// Initializes a new instance of . - /// The aliases in the Search service. - internal ListAliasesResult(IReadOnlyList aliases) - { - Aliases = aliases; - } - - /// The aliases in the Search service. - public IReadOnlyList Aliases { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListDataSourcesResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListDataSourcesResult.Serialization.cs deleted file mode 100644 index 484b67d3a9df..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListDataSourcesResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class ListDataSourcesResult - { - internal static ListDataSourcesResult DeserializeListDataSourcesResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(item)); - } - value = array; - continue; - } - } - return new ListDataSourcesResult(value); - } - - /// Deserializes the model from a raw response. 
- /// The response to deserialize the model from. - internal static ListDataSourcesResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeListDataSourcesResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListDataSourcesResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListDataSourcesResult.cs deleted file mode 100644 index ff14bbdc4a9c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListDataSourcesResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Response from a List Datasources request. If successful, it includes the full definitions of all datasources. - internal partial class ListDataSourcesResult - { - /// Initializes a new instance of . - /// The datasources in the Search service. - internal ListDataSourcesResult(IEnumerable dataSources) - { - DataSources = dataSources.ToList(); - } - - /// Initializes a new instance of . - /// The datasources in the Search service. - internal ListDataSourcesResult(IReadOnlyList dataSources) - { - DataSources = dataSources; - } - - /// The datasources in the Search service. - public IReadOnlyList DataSources { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexersResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexersResult.Serialization.cs deleted file mode 100644 index 3d0f0714e9bb..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexersResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class ListIndexersResult - { - internal static ListIndexersResult DeserializeListIndexersResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndexer.DeserializeSearchIndexer(item)); - } - value = array; - continue; - } - } - return new ListIndexersResult(value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ListIndexersResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeListIndexersResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexersResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexersResult.cs deleted file mode 100644 index 2f541fc5fe08..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexersResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Response from a List Indexers request. If successful, it includes the full definitions of all indexers. - internal partial class ListIndexersResult - { - /// Initializes a new instance of . - /// The indexers in the Search service. - internal ListIndexersResult(IEnumerable indexers) - { - Indexers = indexers.ToList(); - } - - /// Initializes a new instance of . - /// The indexers in the Search service. - internal ListIndexersResult(IReadOnlyList indexers) - { - Indexers = indexers; - } - - /// The indexers in the Search service. - public IReadOnlyList Indexers { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexesResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexesResult.Serialization.cs deleted file mode 100644 index 61a1be1236cd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexesResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class ListIndexesResult - { - internal static ListIndexesResult DeserializeListIndexesResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndex.DeserializeSearchIndex(item)); - } - value = array; - continue; - } - } - return new ListIndexesResult(value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ListIndexesResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeListIndexesResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexesResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexesResult.cs deleted file mode 100644 index 323d24fc4181..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListIndexesResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Response from a List Indexes request. If successful, it includes the full definitions of all indexes. - internal partial class ListIndexesResult - { - /// Initializes a new instance of . - /// The indexes in the Search service. - internal ListIndexesResult(IEnumerable indexes) - { - Indexes = indexes.ToList(); - } - - /// Initializes a new instance of . - /// The indexes in the Search service. - internal ListIndexesResult(IReadOnlyList indexes) - { - Indexes = indexes; - } - - /// The indexes in the Search service. 
- public IReadOnlyList Indexes { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSkillsetsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSkillsetsResult.Serialization.cs deleted file mode 100644 index 586f28a3adca..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSkillsetsResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class ListSkillsetsResult - { - internal static ListSkillsetsResult DeserializeListSkillsetsResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndexerSkillset.DeserializeSearchIndexerSkillset(item)); - } - value = array; - continue; - } - } - return new ListSkillsetsResult(value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ListSkillsetsResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeListSkillsetsResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSkillsetsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSkillsetsResult.cs deleted file mode 100644 index aae21ff90fb6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSkillsetsResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Response from a list skillset request. If successful, it includes the full definitions of all skillsets. - internal partial class ListSkillsetsResult - { - /// Initializes a new instance of . - /// The skillsets defined in the Search service. - internal ListSkillsetsResult(IEnumerable skillsets) - { - Skillsets = skillsets.ToList(); - } - - /// Initializes a new instance of . - /// The skillsets defined in the Search service. - internal ListSkillsetsResult(IReadOnlyList skillsets) - { - Skillsets = skillsets; - } - - /// The skillsets defined in the Search service. - public IReadOnlyList Skillsets { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSynonymMapsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSynonymMapsResult.Serialization.cs deleted file mode 100644 index 07d410387fab..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSynonymMapsResult.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class ListSynonymMapsResult - { - internal static ListSynonymMapsResult DeserializeListSynonymMapsResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SynonymMap.DeserializeSynonymMap(item)); - } - value = array; - continue; - } - } - return new ListSynonymMapsResult(value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ListSynonymMapsResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeListSynonymMapsResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSynonymMapsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSynonymMapsResult.cs deleted file mode 100644 index 7374791d2d65..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ListSynonymMapsResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Response from a List SynonymMaps request. If successful, it includes the full definitions of all synonym maps. - internal partial class ListSynonymMapsResult - { - /// Initializes a new instance of . - /// The synonym maps in the Search service. - internal ListSynonymMapsResult(IEnumerable synonymMaps) - { - SynonymMaps = synonymMaps.ToList(); - } - - /// Initializes a new instance of . - /// The synonym maps in the Search service. - internal ListSynonymMapsResult(IReadOnlyList synonymMaps) - { - SynonymMaps = synonymMaps; - } - - /// The synonym maps in the Search service. - public IReadOnlyList SynonymMaps { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardAnalyzer.Serialization.cs deleted file mode 100644 index 61433b095e65..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardAnalyzer.Serialization.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
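The deleted List*Result models above all unwrap the same response envelope: the service returns { "value": [ ... ] } and the generated deserializer copies that array into a read-only list. A minimal, self-contained System.Text.Json sketch of that shape follows; the payload and its "name" property are simplified stand-ins, not a real service response.

using System;
using System.Collections.Generic;
using System.Text.Json;

// Parse a { "value": [ ... ] } envelope and collect the items, mirroring the
// shape of the generated DeserializeList*Result methods.
string json = "{\"value\":[{\"name\":\"index-a\"},{\"name\":\"index-b\"}]}";
using JsonDocument document = JsonDocument.Parse(json);

IReadOnlyList<string> names = Array.Empty<string>();
foreach (JsonProperty property in document.RootElement.EnumerateObject())
{
    if (property.NameEquals("value"u8))
    {
        var items = new List<string>();
        foreach (JsonElement item in property.Value.EnumerateArray())
        {
            items.Add(item.GetProperty("name").GetString());
        }
        names = items;
    }
}
Console.WriteLine(string.Join(", ", names)); // index-a, index-b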
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LuceneStandardAnalyzer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxTokenLength)) - { - writer.WritePropertyName("maxTokenLength"u8); - writer.WriteNumberValue(MaxTokenLength.Value); - } - if (Optional.IsCollectionDefined(Stopwords)) - { - writer.WritePropertyName("stopwords"u8); - writer.WriteStartArray(); - foreach (var item in Stopwords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static LuceneStandardAnalyzer DeserializeLuceneStandardAnalyzer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxTokenLength = default; - IList stopwords = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxTokenLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTokenLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("stopwords"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - stopwords = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new LuceneStandardAnalyzer(odataType, name, maxTokenLength, stopwords ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new LuceneStandardAnalyzer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLuceneStandardAnalyzer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardAnalyzer.cs deleted file mode 100644 index 0f822d6248b0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardAnalyzer.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Standard Apache Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. - public partial class LuceneStandardAnalyzer : LexicalAnalyzer - { - /// Initializes a new instance of . - /// The name of the analyzer. 
It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public LuceneStandardAnalyzer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Stopwords = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.StandardAnalyzer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of analyzer. - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. - /// A list of stopwords. - internal LuceneStandardAnalyzer(string oDataType, string name, int? maxTokenLength, IList stopwords) : base(oDataType, name) - { - MaxTokenLength = maxTokenLength; - Stopwords = stopwords; - ODataType = oDataType ?? "#Microsoft.Azure.Search.StandardAnalyzer"; - } - - /// The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. - public int? MaxTokenLength { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardTokenizer.Serialization.cs deleted file mode 100644 index 31d8d1db1ed6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardTokenizer.Serialization.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class LuceneStandardTokenizer : IUtf8JsonSerializable - { - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new LuceneStandardTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeLuceneStandardTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardTokenizer.cs deleted file mode 100644 index 823541f9e403..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/LuceneStandardTokenizer.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Breaks text following the Unicode Text Segmentation rules. This tokenizer is implemented using Apache Lucene. 
- public partial class LuceneStandardTokenizer : LexicalTokenizer - { - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringFunction.Serialization.cs deleted file mode 100644 index db8a124e1c57..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringFunction.Serialization.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class MagnitudeScoringFunction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("magnitude"u8); - writer.WriteObjectValue(Parameters); - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - writer.WritePropertyName("fieldName"u8); - writer.WriteStringValue(FieldName); - writer.WritePropertyName("boost"u8); - writer.WriteNumberValue(Boost); - if (Optional.IsDefined(Interpolation)) - { - writer.WritePropertyName("interpolation"u8); - writer.WriteStringValue(Interpolation.Value.ToSerialString()); - } - writer.WriteEndObject(); - } - - internal static MagnitudeScoringFunction DeserializeMagnitudeScoringFunction(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - MagnitudeScoringParameters magnitude = default; - string type = default; - string fieldName = default; - double boost = default; - ScoringFunctionInterpolation? interpolation = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("magnitude"u8)) - { - magnitude = MagnitudeScoringParameters.DeserializeMagnitudeScoringParameters(property.Value); - continue; - } - if (property.NameEquals("type"u8)) - { - type = property.Value.GetString(); - continue; - } - if (property.NameEquals("fieldName"u8)) - { - fieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("boost"u8)) - { - boost = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("interpolation"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - interpolation = property.Value.GetString().ToScoringFunctionInterpolation(); - continue; - } - } - return new MagnitudeScoringFunction(type, fieldName, boost, interpolation, magnitude); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new MagnitudeScoringFunction FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeMagnitudeScoringFunction(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringFunction.cs deleted file mode 100644 index e3eb898e2fe6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringFunction.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
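The generated LuceneStandardTokenizer partial above is empty because its public members live in a hand-written partial; assuming that partial keeps the usual name constructor and MaxTokenLength property (mirroring the sibling Lucene tokenizers), usage would look roughly like:

    using Azure.Search.Documents.Indexes.Models;

    // Assumed members (name constructor, MaxTokenLength) based on the other Lucene tokenizers in this library.
    var standardTokenizer = new LuceneStandardTokenizer("content-tokenizer")
    {
        MaxTokenLength = 255   // default per the surrounding documentation; longer tokens are split
    };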
-// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a function that boosts scores based on the magnitude of a numeric field. - public partial class MagnitudeScoringFunction : ScoringFunction - { - /// Initializes a new instance of . - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - /// The name of the field used as input to the scoring function. - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - /// Parameter values for the magnitude scoring function. - internal MagnitudeScoringFunction(string type, string fieldName, double boost, ScoringFunctionInterpolation? interpolation, MagnitudeScoringParameters parameters) : base(type, fieldName, boost, interpolation) - { - Parameters = parameters; - Type = type ?? "magnitude"; - } - - /// Parameter values for the magnitude scoring function. - public MagnitudeScoringParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringParameters.Serialization.cs deleted file mode 100644 index 4870165a6f1c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringParameters.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class MagnitudeScoringParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("boostingRangeStart"u8); - writer.WriteNumberValue(BoostingRangeStart); - writer.WritePropertyName("boostingRangeEnd"u8); - writer.WriteNumberValue(BoostingRangeEnd); - if (Optional.IsDefined(ShouldBoostBeyondRangeByConstant)) - { - writer.WritePropertyName("constantBoostBeyondRange"u8); - writer.WriteBooleanValue(ShouldBoostBeyondRangeByConstant.Value); - } - writer.WriteEndObject(); - } - - internal static MagnitudeScoringParameters DeserializeMagnitudeScoringParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double boostingRangeStart = default; - double boostingRangeEnd = default; - bool? constantBoostBeyondRange = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("boostingRangeStart"u8)) - { - boostingRangeStart = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("boostingRangeEnd"u8)) - { - boostingRangeEnd = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("constantBoostBeyondRange"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - constantBoostBeyondRange = property.Value.GetBoolean(); - continue; - } - } - return new MagnitudeScoringParameters(boostingRangeStart, boostingRangeEnd, constantBoostBeyondRange); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static MagnitudeScoringParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeMagnitudeScoringParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringParameters.cs deleted file mode 100644 index b838605f7a74..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MagnitudeScoringParameters.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Provides parameter values to a magnitude scoring function. - public partial class MagnitudeScoringParameters - { - /// Initializes a new instance of . - /// The field value at which boosting starts. - /// The field value at which boosting ends. - public MagnitudeScoringParameters(double boostingRangeStart, double boostingRangeEnd) - { - BoostingRangeStart = boostingRangeStart; - BoostingRangeEnd = boostingRangeEnd; - } - - /// Initializes a new instance of . - /// The field value at which boosting starts. - /// The field value at which boosting ends. - /// A value indicating whether to apply a constant boost for field values beyond the range end value; default is false. - internal MagnitudeScoringParameters(double boostingRangeStart, double boostingRangeEnd, bool? shouldBoostBeyondRangeByConstant) - { - BoostingRangeStart = boostingRangeStart; - BoostingRangeEnd = boostingRangeEnd; - ShouldBoostBeyondRangeByConstant = shouldBoostBeyondRangeByConstant; - } - - /// The field value at which boosting starts. - public double BoostingRangeStart { get; set; } - /// The field value at which boosting ends. - public double BoostingRangeEnd { get; set; } - /// A value indicating whether to apply a constant boost for field values beyond the range end value; default is false. - public bool? ShouldBoostBeyondRangeByConstant { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MappingCharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MappingCharFilter.Serialization.cs deleted file mode 100644 index f2b503b4023d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MappingCharFilter.Serialization.cs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
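A hedged sketch of how the magnitude scoring types above fit together in a scoring profile. The MagnitudeScoringParameters constructor and ShouldBoostBeyondRangeByConstant are shown in the diff; the MagnitudeScoringFunction(fieldName, boost, parameters) constructor, the ScoringProfile type, and the "rating" field are assumptions about the hand-written public surface.

    using Azure.Search.Documents.Indexes.Models;

    // Boost documents whose (hypothetical) "rating" field falls between 3.0 and 5.0.
    var parameters = new MagnitudeScoringParameters(boostingRangeStart: 3.0, boostingRangeEnd: 5.0)
    {
        ShouldBoostBeyondRangeByConstant = true   // keep the full boost for values past the range end
    };
    var function = new MagnitudeScoringFunction("rating", boost: 2.0, parameters)   // boost must be positive and != 1.0
    {
        Interpolation = ScoringFunctionInterpolation.Linear
    };
    var profile = new ScoringProfile("boost-by-rating");
    profile.Functions.Add(function);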
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class MappingCharFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("mappings"u8); - writer.WriteStartArray(); - foreach (var item in Mappings) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static MappingCharFilter DeserializeMappingCharFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList mappings = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("mappings"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - mappings = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new MappingCharFilter(odataType, name, mappings); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new MappingCharFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeMappingCharFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MappingCharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MappingCharFilter.cs deleted file mode 100644 index 14598cafd754..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MappingCharFilter.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A character filter that applies mappings defined with the mappings option. Matching is greedy (longest pattern matching at a given point wins). Replacement is allowed to be the empty string. This character filter is implemented using Apache Lucene. - public partial class MappingCharFilter : CharFilter - { - /// Initializes a new instance of . - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of mappings of the following format: "a=>b" (all occurrences of the character "a" will be replaced with character "b"). - /// or is null. 
- public MappingCharFilter(string name, IEnumerable mappings) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(mappings, nameof(mappings)); - - Mappings = mappings.ToList(); - ODataType = "#Microsoft.Azure.Search.MappingCharFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of char filter. - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of mappings of the following format: "a=>b" (all occurrences of the character "a" will be replaced with character "b"). - internal MappingCharFilter(string oDataType, string name, IList mappings) : base(oDataType, name) - { - Mappings = mappings; - ODataType = oDataType ?? "#Microsoft.Azure.Search.MappingCharFilter"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MarkdownHeaderDepth.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MarkdownHeaderDepth.cs deleted file mode 100644 index eb933891ce05..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MarkdownHeaderDepth.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the max header depth that will be considered while grouping markdown content. Default is `h6`. - public readonly partial struct MarkdownHeaderDepth : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public MarkdownHeaderDepth(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string H1Value = "h1"; - private const string H2Value = "h2"; - private const string H3Value = "h3"; - private const string H4Value = "h4"; - private const string H5Value = "h5"; - private const string H6Value = "h6"; - - /// Indicates that headers up to a level of h1 will be considered while grouping markdown content. - public static MarkdownHeaderDepth H1 { get; } = new MarkdownHeaderDepth(H1Value); - /// Indicates that headers up to a level of h2 will be considered while grouping markdown content. - public static MarkdownHeaderDepth H2 { get; } = new MarkdownHeaderDepth(H2Value); - /// Indicates that headers up to a level of h3 will be considered while grouping markdown content. - public static MarkdownHeaderDepth H3 { get; } = new MarkdownHeaderDepth(H3Value); - /// Indicates that headers up to a level of h4 will be considered while grouping markdown content. - public static MarkdownHeaderDepth H4 { get; } = new MarkdownHeaderDepth(H4Value); - /// Indicates that headers up to a level of h5 will be considered while grouping markdown content. - public static MarkdownHeaderDepth H5 { get; } = new MarkdownHeaderDepth(H5Value); - /// Indicates that headers up to a level of h6 will be considered while grouping markdown content. This is the default. - public static MarkdownHeaderDepth H6 { get; } = new MarkdownHeaderDepth(H6Value); - /// Determines if two values are the same. - public static bool operator ==(MarkdownHeaderDepth left, MarkdownHeaderDepth right) => left.Equals(right); - /// Determines if two values are not the same. 
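A short sketch of the MappingCharFilter constructor shown above; the filter name is invented, and each mapping follows the documented "a=>b" form (an empty right-hand side deletes the character).

    using Azure.Search.Documents.Indexes.Models;

    // Replace dots and dashes with underscores before tokenization.
    var charFilter = new MappingCharFilter(
        "punctuation-mapper",
        new[] { ".=>_", "-=>_" });
    // The filter is then referenced by name from an analyzer definition in the index
    // (for example, a CustomAnalyzer's CharFilters collection - assumed wiring, not shown in this diff).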
- public static bool operator !=(MarkdownHeaderDepth left, MarkdownHeaderDepth right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator MarkdownHeaderDepth(string value) => new MarkdownHeaderDepth(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is MarkdownHeaderDepth other && Equals(other); - /// - public bool Equals(MarkdownHeaderDepth other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MergeSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MergeSkill.Serialization.cs deleted file mode 100644 index 405b7a8d5eb8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MergeSkill.Serialization.cs +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class MergeSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(InsertPreTag)) - { - writer.WritePropertyName("insertPreTag"u8); - writer.WriteStringValue(InsertPreTag); - } - if (Optional.IsDefined(InsertPostTag)) - { - writer.WritePropertyName("insertPostTag"u8); - writer.WriteStringValue(InsertPostTag); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static MergeSkill DeserializeMergeSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string insertPreTag = default; - string insertPostTag = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("insertPreTag"u8)) - { - insertPreTag = property.Value.GetString(); - continue; - } - if (property.NameEquals("insertPostTag"u8)) - { - insertPostTag = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = 
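MarkdownHeaderDepth above follows the library's extensible-enum pattern, so the behaviors worth noting are all visible in the struct itself: known values as statics, an implicit conversion from string, and case-insensitive equality. For example:

    using Azure.Search.Documents.Indexes.Models;

    MarkdownHeaderDepth depth = MarkdownHeaderDepth.H3;   // group markdown content under headers up to h3
    MarkdownHeaderDepth parsed = "H3";                    // implicit conversion from string
    bool same = depth == parsed;                          // true - Equals uses InvariantCultureIgnoreCase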
property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new MergeSkill( - odataType, - name, - description, - context, - inputs, - outputs, - insertPreTag, - insertPostTag); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new MergeSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeMergeSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MergeSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MergeSkill.cs deleted file mode 100644 index 801f0e9eba9a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MergeSkill.cs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill for merging two or more strings into a single unified string, with an optional user-defined delimiter separating each component part. - public partial class MergeSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public MergeSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Text.MergeSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The tag indicates the start of the merged text. By default, the tag is an empty space. 
- /// The tag indicates the end of the merged text. By default, the tag is an empty space. - internal MergeSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, string insertPreTag, string insertPostTag) : base(oDataType, name, description, context, inputs, outputs) - { - InsertPreTag = insertPreTag; - InsertPostTag = insertPostTag; - ODataType = oDataType ?? "#Microsoft.Skills.Text.MergeSkill"; - } - - /// The tag indicates the start of the merged text. By default, the tag is an empty space. - public string InsertPreTag { get; set; } - /// The tag indicates the end of the merged text. By default, the tag is an empty space. - public string InsertPostTag { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageStemmingTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageStemmingTokenizer.Serialization.cs deleted file mode 100644 index 7542955042f6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageStemmingTokenizer.Serialization.cs +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class MicrosoftLanguageStemmingTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxTokenLength)) - { - writer.WritePropertyName("maxTokenLength"u8); - writer.WriteNumberValue(MaxTokenLength.Value); - } - if (Optional.IsDefined(IsSearchTokenizer)) - { - writer.WritePropertyName("isSearchTokenizer"u8); - writer.WriteBooleanValue(IsSearchTokenizer.Value); - } - if (Optional.IsDefined(Language)) - { - writer.WritePropertyName("language"u8); - writer.WriteStringValue(Language.Value.ToSerialString()); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static MicrosoftLanguageStemmingTokenizer DeserializeMicrosoftLanguageStemmingTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxTokenLength = default; - bool? isSearchTokenizer = default; - MicrosoftStemmingTokenizerLanguage? 
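A hedged example of the MergeSkill constructor above, using the common pattern of merging OCR text back into document content; the input/output paths and target field name are illustrative, and the InputFieldMappingEntry/OutputFieldMappingEntry initializer properties are assumed from the rest of the skillset model rather than shown in this diff.

    using Azure.Search.Documents.Indexes.Models;

    var mergeSkill = new MergeSkill(
        inputs: new[]
        {
            new InputFieldMappingEntry("text") { Source = "/document/content" },
            new InputFieldMappingEntry("itemsToInsert") { Source = "/document/normalized_images/*/text" },
            new InputFieldMappingEntry("offsets") { Source = "/document/normalized_images/*/contentOffset" },
        },
        outputs: new[]
        {
            new OutputFieldMappingEntry("mergedText") { TargetName = "merged_content" },
        })
    {
        Context = "/document",
        InsertPreTag = " ",    // both tags default to a single space
        InsertPostTag = " "
    };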
language = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxTokenLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTokenLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("isSearchTokenizer"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - isSearchTokenizer = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("language"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - language = property.Value.GetString().ToMicrosoftStemmingTokenizerLanguage(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new MicrosoftLanguageStemmingTokenizer(odataType, name, maxTokenLength, isSearchTokenizer, language); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new MicrosoftLanguageStemmingTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeMicrosoftLanguageStemmingTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageStemmingTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageStemmingTokenizer.cs deleted file mode 100644 index 60f00ae9d371..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageStemmingTokenizer.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Divides text using language-specific rules and reduces words to their base forms. - public partial class MicrosoftLanguageStemmingTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public MicrosoftLanguageStemmingTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The maximum token length. Tokens longer than the maximum length are split. Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. - /// A value indicating how the tokenizer is used. 
Set to true if used as the search tokenizer, set to false if used as the indexing tokenizer. Default is false. - /// The language to use. The default is English. - internal MicrosoftLanguageStemmingTokenizer(string oDataType, string name, int? maxTokenLength, bool? isSearchTokenizer, MicrosoftStemmingTokenizerLanguage? language) : base(oDataType, name) - { - MaxTokenLength = maxTokenLength; - IsSearchTokenizer = isSearchTokenizer; - Language = language; - ODataType = oDataType ?? "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer"; - } - - /// The maximum token length. Tokens longer than the maximum length are split. Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. - public int? MaxTokenLength { get; set; } - /// A value indicating how the tokenizer is used. Set to true if used as the search tokenizer, set to false if used as the indexing tokenizer. Default is false. - public bool? IsSearchTokenizer { get; set; } - /// The language to use. The default is English. - public MicrosoftStemmingTokenizerLanguage? Language { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageTokenizer.Serialization.cs deleted file mode 100644 index 7053ccd820ec..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageTokenizer.Serialization.cs +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class MicrosoftLanguageTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxTokenLength)) - { - writer.WritePropertyName("maxTokenLength"u8); - writer.WriteNumberValue(MaxTokenLength.Value); - } - if (Optional.IsDefined(IsSearchTokenizer)) - { - writer.WritePropertyName("isSearchTokenizer"u8); - writer.WriteBooleanValue(IsSearchTokenizer.Value); - } - if (Optional.IsDefined(Language)) - { - writer.WritePropertyName("language"u8); - writer.WriteStringValue(Language.Value.ToSerialString()); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static MicrosoftLanguageTokenizer DeserializeMicrosoftLanguageTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxTokenLength = default; - bool? isSearchTokenizer = default; - MicrosoftTokenizerLanguage? 
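A minimal sketch of the public MicrosoftLanguageStemmingTokenizer surface shown above (the tokenizer name is made up):

    using Azure.Search.Documents.Indexes.Models;

    var stemmingTokenizer = new MicrosoftLanguageStemmingTokenizer("en-stemming-tokenizer")
    {
        Language = MicrosoftStemmingTokenizerLanguage.English,
        MaxTokenLength = 255,        // default; longer tokens are split as described above
        IsSearchTokenizer = false    // false = used at indexing time (the default)
    };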
language = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxTokenLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTokenLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("isSearchTokenizer"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - isSearchTokenizer = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("language"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - language = property.Value.GetString().ToMicrosoftTokenizerLanguage(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new MicrosoftLanguageTokenizer(odataType, name, maxTokenLength, isSearchTokenizer, language); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new MicrosoftLanguageTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeMicrosoftLanguageTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageTokenizer.cs deleted file mode 100644 index 44e38f47db40..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftLanguageTokenizer.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Divides text using language-specific rules. - public partial class MicrosoftLanguageTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public MicrosoftLanguageTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The maximum token length. Tokens longer than the maximum length are split. Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. - /// A value indicating how the tokenizer is used. Set to true if used as the search tokenizer, set to false if used as the indexing tokenizer. Default is false. - /// The language to use. The default is English. 
- internal MicrosoftLanguageTokenizer(string oDataType, string name, int? maxTokenLength, bool? isSearchTokenizer, MicrosoftTokenizerLanguage? language) : base(oDataType, name) - { - MaxTokenLength = maxTokenLength; - IsSearchTokenizer = isSearchTokenizer; - Language = language; - ODataType = oDataType ?? "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer"; - } - - /// The maximum token length. Tokens longer than the maximum length are split. Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. - public int? MaxTokenLength { get; set; } - /// A value indicating how the tokenizer is used. Set to true if used as the search tokenizer, set to false if used as the indexing tokenizer. Default is false. - public bool? IsSearchTokenizer { get; set; } - /// The language to use. The default is English. - public MicrosoftTokenizerLanguage? Language { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftStemmingTokenizerLanguage.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftStemmingTokenizerLanguage.Serialization.cs deleted file mode 100644 index 21ab03cfa2c5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftStemmingTokenizerLanguage.Serialization.cs +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class MicrosoftStemmingTokenizerLanguageExtensions - { - public static string ToSerialString(this MicrosoftStemmingTokenizerLanguage value) => value switch - { - MicrosoftStemmingTokenizerLanguage.Arabic => "arabic", - MicrosoftStemmingTokenizerLanguage.Bangla => "bangla", - MicrosoftStemmingTokenizerLanguage.Bulgarian => "bulgarian", - MicrosoftStemmingTokenizerLanguage.Catalan => "catalan", - MicrosoftStemmingTokenizerLanguage.Croatian => "croatian", - MicrosoftStemmingTokenizerLanguage.Czech => "czech", - MicrosoftStemmingTokenizerLanguage.Danish => "danish", - MicrosoftStemmingTokenizerLanguage.Dutch => "dutch", - MicrosoftStemmingTokenizerLanguage.English => "english", - MicrosoftStemmingTokenizerLanguage.Estonian => "estonian", - MicrosoftStemmingTokenizerLanguage.Finnish => "finnish", - MicrosoftStemmingTokenizerLanguage.French => "french", - MicrosoftStemmingTokenizerLanguage.German => "german", - MicrosoftStemmingTokenizerLanguage.Greek => "greek", - MicrosoftStemmingTokenizerLanguage.Gujarati => "gujarati", - MicrosoftStemmingTokenizerLanguage.Hebrew => "hebrew", - MicrosoftStemmingTokenizerLanguage.Hindi => "hindi", - MicrosoftStemmingTokenizerLanguage.Hungarian => "hungarian", - MicrosoftStemmingTokenizerLanguage.Icelandic => "icelandic", - MicrosoftStemmingTokenizerLanguage.Indonesian => "indonesian", - MicrosoftStemmingTokenizerLanguage.Italian => "italian", - MicrosoftStemmingTokenizerLanguage.Kannada => "kannada", - MicrosoftStemmingTokenizerLanguage.Latvian => "latvian", - MicrosoftStemmingTokenizerLanguage.Lithuanian => "lithuanian", - MicrosoftStemmingTokenizerLanguage.Malay => "malay", - MicrosoftStemmingTokenizerLanguage.Malayalam => "malayalam", - MicrosoftStemmingTokenizerLanguage.Marathi => "marathi", - MicrosoftStemmingTokenizerLanguage.NorwegianBokmaal => "norwegianBokmaal", - 
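The non-stemming MicrosoftLanguageTokenizer works the same way; attaching it to an index via SearchIndex.Tokenizers (and the "my-index" name) is an assumption about the usual index-definition flow rather than something shown in this diff.

    using Azure.Search.Documents.Indexes.Models;

    var languageTokenizer = new MicrosoftLanguageTokenizer("ja-tokenizer")
    {
        Language = MicrosoftTokenizerLanguage.Japanese,
        MaxTokenLength = 255
    };
    var index = new SearchIndex("my-index");   // assumed index-definition wiring
    index.Tokenizers.Add(languageTokenizer);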
MicrosoftStemmingTokenizerLanguage.Polish => "polish", - MicrosoftStemmingTokenizerLanguage.Portuguese => "portuguese", - MicrosoftStemmingTokenizerLanguage.PortugueseBrazilian => "portugueseBrazilian", - MicrosoftStemmingTokenizerLanguage.Punjabi => "punjabi", - MicrosoftStemmingTokenizerLanguage.Romanian => "romanian", - MicrosoftStemmingTokenizerLanguage.Russian => "russian", - MicrosoftStemmingTokenizerLanguage.SerbianCyrillic => "serbianCyrillic", - MicrosoftStemmingTokenizerLanguage.SerbianLatin => "serbianLatin", - MicrosoftStemmingTokenizerLanguage.Slovak => "slovak", - MicrosoftStemmingTokenizerLanguage.Slovenian => "slovenian", - MicrosoftStemmingTokenizerLanguage.Spanish => "spanish", - MicrosoftStemmingTokenizerLanguage.Swedish => "swedish", - MicrosoftStemmingTokenizerLanguage.Tamil => "tamil", - MicrosoftStemmingTokenizerLanguage.Telugu => "telugu", - MicrosoftStemmingTokenizerLanguage.Turkish => "turkish", - MicrosoftStemmingTokenizerLanguage.Ukrainian => "ukrainian", - MicrosoftStemmingTokenizerLanguage.Urdu => "urdu", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown MicrosoftStemmingTokenizerLanguage value.") - }; - - public static MicrosoftStemmingTokenizerLanguage ToMicrosoftStemmingTokenizerLanguage(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "arabic")) return MicrosoftStemmingTokenizerLanguage.Arabic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "bangla")) return MicrosoftStemmingTokenizerLanguage.Bangla; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "bulgarian")) return MicrosoftStemmingTokenizerLanguage.Bulgarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "catalan")) return MicrosoftStemmingTokenizerLanguage.Catalan; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "croatian")) return MicrosoftStemmingTokenizerLanguage.Croatian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "czech")) return MicrosoftStemmingTokenizerLanguage.Czech; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "danish")) return MicrosoftStemmingTokenizerLanguage.Danish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "dutch")) return MicrosoftStemmingTokenizerLanguage.Dutch; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "english")) return MicrosoftStemmingTokenizerLanguage.English; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "estonian")) return MicrosoftStemmingTokenizerLanguage.Estonian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "finnish")) return MicrosoftStemmingTokenizerLanguage.Finnish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "french")) return MicrosoftStemmingTokenizerLanguage.French; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "german")) return MicrosoftStemmingTokenizerLanguage.German; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "greek")) return MicrosoftStemmingTokenizerLanguage.Greek; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "gujarati")) return MicrosoftStemmingTokenizerLanguage.Gujarati; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hebrew")) return MicrosoftStemmingTokenizerLanguage.Hebrew; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hindi")) return MicrosoftStemmingTokenizerLanguage.Hindi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hungarian")) return MicrosoftStemmingTokenizerLanguage.Hungarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "icelandic")) return MicrosoftStemmingTokenizerLanguage.Icelandic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "indonesian")) 
return MicrosoftStemmingTokenizerLanguage.Indonesian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "italian")) return MicrosoftStemmingTokenizerLanguage.Italian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "kannada")) return MicrosoftStemmingTokenizerLanguage.Kannada; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "latvian")) return MicrosoftStemmingTokenizerLanguage.Latvian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lithuanian")) return MicrosoftStemmingTokenizerLanguage.Lithuanian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "malay")) return MicrosoftStemmingTokenizerLanguage.Malay; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "malayalam")) return MicrosoftStemmingTokenizerLanguage.Malayalam; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "marathi")) return MicrosoftStemmingTokenizerLanguage.Marathi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "norwegianBokmaal")) return MicrosoftStemmingTokenizerLanguage.NorwegianBokmaal; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "polish")) return MicrosoftStemmingTokenizerLanguage.Polish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portuguese")) return MicrosoftStemmingTokenizerLanguage.Portuguese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portugueseBrazilian")) return MicrosoftStemmingTokenizerLanguage.PortugueseBrazilian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "punjabi")) return MicrosoftStemmingTokenizerLanguage.Punjabi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "romanian")) return MicrosoftStemmingTokenizerLanguage.Romanian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "russian")) return MicrosoftStemmingTokenizerLanguage.Russian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "serbianCyrillic")) return MicrosoftStemmingTokenizerLanguage.SerbianCyrillic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "serbianLatin")) return MicrosoftStemmingTokenizerLanguage.SerbianLatin; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "slovak")) return MicrosoftStemmingTokenizerLanguage.Slovak; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "slovenian")) return MicrosoftStemmingTokenizerLanguage.Slovenian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "spanish")) return MicrosoftStemmingTokenizerLanguage.Spanish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "swedish")) return MicrosoftStemmingTokenizerLanguage.Swedish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "tamil")) return MicrosoftStemmingTokenizerLanguage.Tamil; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "telugu")) return MicrosoftStemmingTokenizerLanguage.Telugu; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "turkish")) return MicrosoftStemmingTokenizerLanguage.Turkish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "ukrainian")) return MicrosoftStemmingTokenizerLanguage.Ukrainian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "urdu")) return MicrosoftStemmingTokenizerLanguage.Urdu; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown MicrosoftStemmingTokenizerLanguage value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftStemmingTokenizerLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftStemmingTokenizerLanguage.cs deleted file mode 100644 index 6b04031a3d0f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftStemmingTokenizerLanguage.cs +++ /dev/null @@ -1,104 +0,0 @@ -// 
Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Lists the languages supported by the Microsoft language stemming tokenizer. - public enum MicrosoftStemmingTokenizerLanguage - { - /// Selects the Microsoft stemming tokenizer for Arabic. - Arabic, - /// Selects the Microsoft stemming tokenizer for Bangla. - Bangla, - /// Selects the Microsoft stemming tokenizer for Bulgarian. - Bulgarian, - /// Selects the Microsoft stemming tokenizer for Catalan. - Catalan, - /// Selects the Microsoft stemming tokenizer for Croatian. - Croatian, - /// Selects the Microsoft stemming tokenizer for Czech. - Czech, - /// Selects the Microsoft stemming tokenizer for Danish. - Danish, - /// Selects the Microsoft stemming tokenizer for Dutch. - Dutch, - /// Selects the Microsoft stemming tokenizer for English. - English, - /// Selects the Microsoft stemming tokenizer for Estonian. - Estonian, - /// Selects the Microsoft stemming tokenizer for Finnish. - Finnish, - /// Selects the Microsoft stemming tokenizer for French. - French, - /// Selects the Microsoft stemming tokenizer for German. - German, - /// Selects the Microsoft stemming tokenizer for Greek. - Greek, - /// Selects the Microsoft stemming tokenizer for Gujarati. - Gujarati, - /// Selects the Microsoft stemming tokenizer for Hebrew. - Hebrew, - /// Selects the Microsoft stemming tokenizer for Hindi. - Hindi, - /// Selects the Microsoft stemming tokenizer for Hungarian. - Hungarian, - /// Selects the Microsoft stemming tokenizer for Icelandic. - Icelandic, - /// Selects the Microsoft stemming tokenizer for Indonesian. - Indonesian, - /// Selects the Microsoft stemming tokenizer for Italian. - Italian, - /// Selects the Microsoft stemming tokenizer for Kannada. - Kannada, - /// Selects the Microsoft stemming tokenizer for Latvian. - Latvian, - /// Selects the Microsoft stemming tokenizer for Lithuanian. - Lithuanian, - /// Selects the Microsoft stemming tokenizer for Malay. - Malay, - /// Selects the Microsoft stemming tokenizer for Malayalam. - Malayalam, - /// Selects the Microsoft stemming tokenizer for Marathi. - Marathi, - /// Selects the Microsoft stemming tokenizer for Norwegian (Bokmål). - NorwegianBokmaal, - /// Selects the Microsoft stemming tokenizer for Polish. - Polish, - /// Selects the Microsoft stemming tokenizer for Portuguese. - Portuguese, - /// Selects the Microsoft stemming tokenizer for Portuguese (Brazil). - PortugueseBrazilian, - /// Selects the Microsoft stemming tokenizer for Punjabi. - Punjabi, - /// Selects the Microsoft stemming tokenizer for Romanian. - Romanian, - /// Selects the Microsoft stemming tokenizer for Russian. - Russian, - /// Selects the Microsoft stemming tokenizer for Serbian (Cyrillic). - SerbianCyrillic, - /// Selects the Microsoft stemming tokenizer for Serbian (Latin). - SerbianLatin, - /// Selects the Microsoft stemming tokenizer for Slovak. - Slovak, - /// Selects the Microsoft stemming tokenizer for Slovenian. - Slovenian, - /// Selects the Microsoft stemming tokenizer for Spanish. - Spanish, - /// Selects the Microsoft stemming tokenizer for Swedish. - Swedish, - /// Selects the Microsoft stemming tokenizer for Tamil. - Tamil, - /// Selects the Microsoft stemming tokenizer for Telugu. - Telugu, - /// Selects the Microsoft stemming tokenizer for Turkish. - Turkish, - /// Selects the Microsoft stemming tokenizer for Ukrainian. 
- Ukrainian, - /// Selects the Microsoft stemming tokenizer for Urdu. - Urdu - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftTokenizerLanguage.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftTokenizerLanguage.Serialization.cs deleted file mode 100644 index 69c7798a2bfa..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftTokenizerLanguage.Serialization.cs +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class MicrosoftTokenizerLanguageExtensions - { - public static string ToSerialString(this MicrosoftTokenizerLanguage value) => value switch - { - MicrosoftTokenizerLanguage.Bangla => "bangla", - MicrosoftTokenizerLanguage.Bulgarian => "bulgarian", - MicrosoftTokenizerLanguage.Catalan => "catalan", - MicrosoftTokenizerLanguage.ChineseSimplified => "chineseSimplified", - MicrosoftTokenizerLanguage.ChineseTraditional => "chineseTraditional", - MicrosoftTokenizerLanguage.Croatian => "croatian", - MicrosoftTokenizerLanguage.Czech => "czech", - MicrosoftTokenizerLanguage.Danish => "danish", - MicrosoftTokenizerLanguage.Dutch => "dutch", - MicrosoftTokenizerLanguage.English => "english", - MicrosoftTokenizerLanguage.French => "french", - MicrosoftTokenizerLanguage.German => "german", - MicrosoftTokenizerLanguage.Greek => "greek", - MicrosoftTokenizerLanguage.Gujarati => "gujarati", - MicrosoftTokenizerLanguage.Hindi => "hindi", - MicrosoftTokenizerLanguage.Icelandic => "icelandic", - MicrosoftTokenizerLanguage.Indonesian => "indonesian", - MicrosoftTokenizerLanguage.Italian => "italian", - MicrosoftTokenizerLanguage.Japanese => "japanese", - MicrosoftTokenizerLanguage.Kannada => "kannada", - MicrosoftTokenizerLanguage.Korean => "korean", - MicrosoftTokenizerLanguage.Malay => "malay", - MicrosoftTokenizerLanguage.Malayalam => "malayalam", - MicrosoftTokenizerLanguage.Marathi => "marathi", - MicrosoftTokenizerLanguage.NorwegianBokmaal => "norwegianBokmaal", - MicrosoftTokenizerLanguage.Polish => "polish", - MicrosoftTokenizerLanguage.Portuguese => "portuguese", - MicrosoftTokenizerLanguage.PortugueseBrazilian => "portugueseBrazilian", - MicrosoftTokenizerLanguage.Punjabi => "punjabi", - MicrosoftTokenizerLanguage.Romanian => "romanian", - MicrosoftTokenizerLanguage.Russian => "russian", - MicrosoftTokenizerLanguage.SerbianCyrillic => "serbianCyrillic", - MicrosoftTokenizerLanguage.SerbianLatin => "serbianLatin", - MicrosoftTokenizerLanguage.Slovenian => "slovenian", - MicrosoftTokenizerLanguage.Spanish => "spanish", - MicrosoftTokenizerLanguage.Swedish => "swedish", - MicrosoftTokenizerLanguage.Tamil => "tamil", - MicrosoftTokenizerLanguage.Telugu => "telugu", - MicrosoftTokenizerLanguage.Thai => "thai", - MicrosoftTokenizerLanguage.Ukrainian => "ukrainian", - MicrosoftTokenizerLanguage.Urdu => "urdu", - MicrosoftTokenizerLanguage.Vietnamese => "vietnamese", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown MicrosoftTokenizerLanguage value.") - }; - - public static MicrosoftTokenizerLanguage ToMicrosoftTokenizerLanguage(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "bangla")) return MicrosoftTokenizerLanguage.Bangla; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "bulgarian")) return MicrosoftTokenizerLanguage.Bulgarian; - if 
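The ToSerialString/ToMicrosoftStemmingTokenizerLanguage helpers above define the wire contract for this enum: each member maps to a camelCase string and parsing is case-insensitive. They are internal, so from calling code the mapping only surfaces in the serialized tokenizer definition, roughly as sketched here (JSON shown as comments; shape taken from the Write method earlier in the diff):

    using Azure.Search.Documents.Indexes.Models;

    var nbTokenizer = new MicrosoftLanguageStemmingTokenizer("nb-stemming-tokenizer")
    {
        Language = MicrosoftStemmingTokenizerLanguage.NorwegianBokmaal   // serialized as "norwegianBokmaal"
    };
    // Approximate wire payload produced by the Write(...) implementation shown above:
    // {
    //   "language": "norwegianBokmaal",
    //   "@odata.type": "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer",
    //   "name": "nb-stemming-tokenizer"
    // }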
(StringComparer.OrdinalIgnoreCase.Equals(value, "catalan")) return MicrosoftTokenizerLanguage.Catalan; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "chineseSimplified")) return MicrosoftTokenizerLanguage.ChineseSimplified; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "chineseTraditional")) return MicrosoftTokenizerLanguage.ChineseTraditional; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "croatian")) return MicrosoftTokenizerLanguage.Croatian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "czech")) return MicrosoftTokenizerLanguage.Czech; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "danish")) return MicrosoftTokenizerLanguage.Danish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "dutch")) return MicrosoftTokenizerLanguage.Dutch; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "english")) return MicrosoftTokenizerLanguage.English; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "french")) return MicrosoftTokenizerLanguage.French; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "german")) return MicrosoftTokenizerLanguage.German; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "greek")) return MicrosoftTokenizerLanguage.Greek; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "gujarati")) return MicrosoftTokenizerLanguage.Gujarati; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hindi")) return MicrosoftTokenizerLanguage.Hindi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "icelandic")) return MicrosoftTokenizerLanguage.Icelandic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "indonesian")) return MicrosoftTokenizerLanguage.Indonesian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "italian")) return MicrosoftTokenizerLanguage.Italian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "japanese")) return MicrosoftTokenizerLanguage.Japanese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "kannada")) return MicrosoftTokenizerLanguage.Kannada; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "korean")) return MicrosoftTokenizerLanguage.Korean; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "malay")) return MicrosoftTokenizerLanguage.Malay; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "malayalam")) return MicrosoftTokenizerLanguage.Malayalam; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "marathi")) return MicrosoftTokenizerLanguage.Marathi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "norwegianBokmaal")) return MicrosoftTokenizerLanguage.NorwegianBokmaal; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "polish")) return MicrosoftTokenizerLanguage.Polish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portuguese")) return MicrosoftTokenizerLanguage.Portuguese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portugueseBrazilian")) return MicrosoftTokenizerLanguage.PortugueseBrazilian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "punjabi")) return MicrosoftTokenizerLanguage.Punjabi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "romanian")) return MicrosoftTokenizerLanguage.Romanian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "russian")) return MicrosoftTokenizerLanguage.Russian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "serbianCyrillic")) return MicrosoftTokenizerLanguage.SerbianCyrillic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "serbianLatin")) return MicrosoftTokenizerLanguage.SerbianLatin; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "slovenian")) return 
MicrosoftTokenizerLanguage.Slovenian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "spanish")) return MicrosoftTokenizerLanguage.Spanish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "swedish")) return MicrosoftTokenizerLanguage.Swedish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "tamil")) return MicrosoftTokenizerLanguage.Tamil; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "telugu")) return MicrosoftTokenizerLanguage.Telugu; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "thai")) return MicrosoftTokenizerLanguage.Thai; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "ukrainian")) return MicrosoftTokenizerLanguage.Ukrainian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "urdu")) return MicrosoftTokenizerLanguage.Urdu; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "vietnamese")) return MicrosoftTokenizerLanguage.Vietnamese; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown MicrosoftTokenizerLanguage value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftTokenizerLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftTokenizerLanguage.cs deleted file mode 100644 index ba91cc79b605..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/MicrosoftTokenizerLanguage.cs +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Lists the languages supported by the Microsoft language tokenizer. - public enum MicrosoftTokenizerLanguage - { - /// Selects the Microsoft tokenizer for Bangla. - Bangla, - /// Selects the Microsoft tokenizer for Bulgarian. - Bulgarian, - /// Selects the Microsoft tokenizer for Catalan. - Catalan, - /// Selects the Microsoft tokenizer for Chinese (Simplified). - ChineseSimplified, - /// Selects the Microsoft tokenizer for Chinese (Traditional). - ChineseTraditional, - /// Selects the Microsoft tokenizer for Croatian. - Croatian, - /// Selects the Microsoft tokenizer for Czech. - Czech, - /// Selects the Microsoft tokenizer for Danish. - Danish, - /// Selects the Microsoft tokenizer for Dutch. - Dutch, - /// Selects the Microsoft tokenizer for English. - English, - /// Selects the Microsoft tokenizer for French. - French, - /// Selects the Microsoft tokenizer for German. - German, - /// Selects the Microsoft tokenizer for Greek. - Greek, - /// Selects the Microsoft tokenizer for Gujarati. - Gujarati, - /// Selects the Microsoft tokenizer for Hindi. - Hindi, - /// Selects the Microsoft tokenizer for Icelandic. - Icelandic, - /// Selects the Microsoft tokenizer for Indonesian. - Indonesian, - /// Selects the Microsoft tokenizer for Italian. - Italian, - /// Selects the Microsoft tokenizer for Japanese. - Japanese, - /// Selects the Microsoft tokenizer for Kannada. - Kannada, - /// Selects the Microsoft tokenizer for Korean. - Korean, - /// Selects the Microsoft tokenizer for Malay. - Malay, - /// Selects the Microsoft tokenizer for Malayalam. - Malayalam, - /// Selects the Microsoft tokenizer for Marathi. - Marathi, - /// Selects the Microsoft tokenizer for Norwegian (Bokmål). - NorwegianBokmaal, - /// Selects the Microsoft tokenizer for Polish. - Polish, - /// Selects the Microsoft tokenizer for Portuguese. - Portuguese, - /// Selects the Microsoft tokenizer for Portuguese (Brazil). - PortugueseBrazilian, - /// Selects the Microsoft tokenizer for Punjabi. 
- Punjabi, - /// Selects the Microsoft tokenizer for Romanian. - Romanian, - /// Selects the Microsoft tokenizer for Russian. - Russian, - /// Selects the Microsoft tokenizer for Serbian (Cyrillic). - SerbianCyrillic, - /// Selects the Microsoft tokenizer for Serbian (Latin). - SerbianLatin, - /// Selects the Microsoft tokenizer for Slovenian. - Slovenian, - /// Selects the Microsoft tokenizer for Spanish. - Spanish, - /// Selects the Microsoft tokenizer for Swedish. - Swedish, - /// Selects the Microsoft tokenizer for Tamil. - Tamil, - /// Selects the Microsoft tokenizer for Telugu. - Telugu, - /// Selects the Microsoft tokenizer for Thai. - Thai, - /// Selects the Microsoft tokenizer for Ukrainian. - Ukrainian, - /// Selects the Microsoft tokenizer for Urdu. - Urdu, - /// Selects the Microsoft tokenizer for Vietnamese. - Vietnamese - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenFilter.Serialization.cs deleted file mode 100644 index 1c337ea49060..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenFilter.Serialization.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class NGramTokenFilter : IUtf8JsonSerializable - { - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new NGramTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeNGramTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenFilter.cs deleted file mode 100644 index 423858fa3c89..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenFilter.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Generates n-grams of the given size(s). This token filter is implemented using Apache Lucene. - public partial class NGramTokenFilter : TokenFilter - { - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenizer.Serialization.cs deleted file mode 100644 index 6ba59a63e9f9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenizer.Serialization.cs +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
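The MicrosoftTokenizerLanguage enum deleted above backs the language option of the Microsoft language tokenizer. A minimal sketch of how a caller would pick one of these values, assuming the public MicrosoftLanguageTokenizer model (not part of this diff) exposes a Language property of this enum type:

using Azure.Search.Documents.Indexes.Models;

// Hypothetical illustration; MicrosoftLanguageTokenizer is assumed to expose Language.
var tokenizer = new MicrosoftLanguageTokenizer("my-pt-br-tokenizer")
{
    // One of the enum values removed above; serialized as "portugueseBrazilian"
    // by the extension methods shown earlier in this diff.
    Language = MicrosoftTokenizerLanguage.PortugueseBrazilian
};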
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class NGramTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MinGram)) - { - writer.WritePropertyName("minGram"u8); - writer.WriteNumberValue(MinGram.Value); - } - if (Optional.IsDefined(MaxGram)) - { - writer.WritePropertyName("maxGram"u8); - writer.WriteNumberValue(MaxGram.Value); - } - if (Optional.IsCollectionDefined(TokenChars)) - { - writer.WritePropertyName("tokenChars"u8); - writer.WriteStartArray(); - foreach (var item in TokenChars) - { - writer.WriteStringValue(item.ToSerialString()); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static NGramTokenizer DeserializeNGramTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? minGram = default; - int? maxGram = default; - IList tokenChars = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("minGram"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - minGram = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxGram"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxGram = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("tokenChars"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString().ToTokenCharacterKind()); - } - tokenChars = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new NGramTokenizer(odataType, name, minGram, maxGram, tokenChars ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new NGramTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeNGramTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenizer.cs deleted file mode 100644 index 5b43dc4e8d67..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/NGramTokenizer.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Tokenizes the input into n-grams of the given size(s). This tokenizer is implemented using Apache Lucene. 
- public partial class NGramTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public NGramTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - TokenChars = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.NGramTokenizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. - /// The maximum n-gram length. Default is 2. Maximum is 300. - /// Character classes to keep in the tokens. - internal NGramTokenizer(string oDataType, string name, int? minGram, int? maxGram, IList tokenChars) : base(oDataType, name) - { - MinGram = minGram; - MaxGram = maxGram; - TokenChars = tokenChars; - ODataType = oDataType ?? "#Microsoft.Azure.Search.NGramTokenizer"; - } - - /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. - public int? MinGram { get; set; } - /// The maximum n-gram length. Default is 2. Maximum is 300. - public int? MaxGram { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/NativeBlobSoftDeleteDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/NativeBlobSoftDeleteDeletionDetectionPolicy.Serialization.cs deleted file mode 100644 index 251f91780504..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/NativeBlobSoftDeleteDeletionDetectionPolicy.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class NativeBlobSoftDeleteDeletionDetectionPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static NativeBlobSoftDeleteDeletionDetectionPolicy DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new NativeBlobSoftDeleteDeletionDetectionPolicy(odataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new NativeBlobSoftDeleteDeletionDetectionPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(document.RootElement); - } - - /// Convert into a . 
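The NGramTokenizer surface removed above (MinGram, MaxGram, TokenChars) is typically registered on an index's custom analysis chain. A rough sketch, assuming the SearchIndex model's Tokenizers collection and the TokenCharacterKind enum referenced by the serializer (both outside this diff):

using Azure.Search.Documents.Indexes.Models;

var ngram = new NGramTokenizer("edge-friendly-ngram")
{
    MinGram = 2, // default is 1; must be less than MaxGram
    MaxGram = 4  // default is 2; maximum is 300
};
ngram.TokenChars.Add(TokenCharacterKind.Letter);
ngram.TokenChars.Add(TokenCharacterKind.Digit);

// Wire shape produced by the serializer deleted above (illustrative):
// { "@odata.type": "#Microsoft.Azure.Search.NGramTokenizer",
//   "name": "edge-friendly-ngram", "minGram": 2, "maxGram": 4,
//   "tokenChars": [ "letter", "digit" ] }
var index = new SearchIndex("products");
index.Tokenizers.Add(ngram);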
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/NativeBlobSoftDeleteDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/NativeBlobSoftDeleteDeletionDetectionPolicy.cs deleted file mode 100644 index 959cb8a79fcf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/NativeBlobSoftDeleteDeletionDetectionPolicy.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a data deletion detection policy utilizing Azure Blob Storage's native soft delete feature for deletion detection. - public partial class NativeBlobSoftDeleteDeletionDetectionPolicy : DataDeletionDetectionPolicy - { - /// Initializes a new instance of . - public NativeBlobSoftDeleteDeletionDetectionPolicy() - { - ODataType = "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of data deletion detection policy. - internal NativeBlobSoftDeleteDeletionDetectionPolicy(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkill.Serialization.cs deleted file mode 100644 index af2075198e02..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkill.Serialization.cs +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
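NativeBlobSoftDeleteDeletionDetectionPolicy carries no settable state beyond its @odata.type; it is simply attached to a data source. A sketch under the assumption that SearchIndexerDataSourceConnection (not part of this diff) exposes a DataDeletionDetectionPolicy property:

using Azure.Search.Documents.Indexes.Models;

static void EnableNativeSoftDeleteTracking(SearchIndexerDataSourceConnection blobDataSource)
{
    // Relies on Azure Blob Storage's native soft delete to detect removed blobs.
    blobDataSource.DataDeletionDetectionPolicy =
        new NativeBlobSoftDeleteDeletionDetectionPolicy();
}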
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class OcrSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(ShouldDetectOrientation)) - { - if (ShouldDetectOrientation != null) - { - writer.WritePropertyName("detectOrientation"u8); - writer.WriteBooleanValue(ShouldDetectOrientation.Value); - } - else - { - writer.WriteNull("detectOrientation"); - } - } - if (Optional.IsDefined(LineEnding)) - { - writer.WritePropertyName("lineEnding"u8); - writer.WriteStringValue(LineEnding.Value.ToString()); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static OcrSkill DeserializeOcrSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - OcrSkillLanguage? defaultLanguageCode = default; - bool? detectOrientation = default; - OcrLineEnding? 
lineEnding = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = new OcrSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("detectOrientation"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - detectOrientation = null; - continue; - } - detectOrientation = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("lineEnding"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - lineEnding = new OcrLineEnding(property.Value.GetString()); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new OcrSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - detectOrientation, - lineEnding); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new OcrSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeOcrSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkill.cs deleted file mode 100644 index 07a1c8208686..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkill.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that extracts text from image files. - public partial class OcrSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. 
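For reference, the OcrSkill serializer removed above reads and writes a payload of roughly this shape; the property names come from the Write/Deserialize methods in the diff, while the concrete values below are made up for illustration:

// Illustrative OCR skill payload (values are invented; shape follows the serializer above).
const string ocrSkillJson = """
{
  "@odata.type": "#Microsoft.Skills.Vision.OcrSkill",
  "name": "#1",
  "context": "/document/normalized_images/*",
  "defaultLanguageCode": "en",
  "detectOrientation": true,
  "lineEnding": "space",
  "inputs": [ { "name": "image", "source": "/document/normalized_images/*" } ],
  "outputs": [ { "name": "text", "targetName": "ocrText" } ]
}
""";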
- public OcrSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Vision.OcrSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// A value indicating to turn orientation detection on or not. Default is false. - /// Defines the sequence of characters to use between the lines of text recognized by the OCR skill. The default value is "space". - internal OcrSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, OcrSkillLanguage? defaultLanguageCode, bool? shouldDetectOrientation, OcrLineEnding? lineEnding) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - ShouldDetectOrientation = shouldDetectOrientation; - LineEnding = lineEnding; - ODataType = oDataType ?? "#Microsoft.Skills.Vision.OcrSkill"; - } - /// Defines the sequence of characters to use between the lines of text recognized by the OCR skill. The default value is "space". - public OcrLineEnding? LineEnding { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/OutputFieldMappingEntry.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/OutputFieldMappingEntry.Serialization.cs deleted file mode 100644 index 71c35b52d31f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/OutputFieldMappingEntry.Serialization.cs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
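In object-model terms, the removed OcrSkill constructor takes the input and output mappings up front, with the remaining knobs as nullable properties. A sketch, assuming the InputFieldMappingEntry type, the OcrSkillLanguage.En / OcrLineEnding.Space spellings of the documented defaults, and the SearchIndexerSkillset(name, skills) constructor, none of which appear in this diff:

using Azure.Search.Documents.Indexes.Models;

var ocr = new OcrSkill(
    inputs: new[] { new InputFieldMappingEntry("image") { Source = "/document/normalized_images/*" } },
    outputs: new[] { new OutputFieldMappingEntry("text") { TargetName = "ocrText" } })
{
    Context = "/document/normalized_images/*",
    DefaultLanguageCode = OcrSkillLanguage.En, // documented default is "en"
    ShouldDetectOrientation = true,            // documented default is false
    LineEnding = OcrLineEnding.Space           // documented default is "space"
};

var skillset = new SearchIndexerSkillset("ocr-skillset", new SearchIndexerSkill[] { ocr });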
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class OutputFieldMappingEntry : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(TargetName)) - { - writer.WritePropertyName("targetName"u8); - writer.WriteStringValue(TargetName); - } - writer.WriteEndObject(); - } - - internal static OutputFieldMappingEntry DeserializeOutputFieldMappingEntry(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string targetName = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("targetName"u8)) - { - targetName = property.Value.GetString(); - continue; - } - } - return new OutputFieldMappingEntry(name, targetName); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static OutputFieldMappingEntry FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeOutputFieldMappingEntry(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/OutputFieldMappingEntry.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/OutputFieldMappingEntry.cs deleted file mode 100644 index 76ee37088e8a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/OutputFieldMappingEntry.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Output field mapping for a skill. - public partial class OutputFieldMappingEntry - { - /// Initializes a new instance of . - /// The name of the output defined by the skill. - /// is null. - public OutputFieldMappingEntry(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - } - - /// Initializes a new instance of . - /// The name of the output defined by the skill. - /// The target name of the output. It is optional and default to name. - internal OutputFieldMappingEntry(string name, string targetName) - { - Name = name; - TargetName = targetName; - } - - /// The name of the output defined by the skill. - public string Name { get; set; } - /// The target name of the output. It is optional and default to name. - public string TargetName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PathHierarchyTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PathHierarchyTokenizer.Serialization.cs deleted file mode 100644 index 7d6001aee909..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PathHierarchyTokenizer.Serialization.cs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PathHierarchyTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Delimiter)) - { - writer.WritePropertyName("delimiter"u8); - writer.WriteStringValue(Delimiter.Value); - } - if (Optional.IsDefined(Replacement)) - { - writer.WritePropertyName("replacement"u8); - writer.WriteStringValue(Replacement.Value); - } - if (Optional.IsDefined(MaxTokenLength)) - { - writer.WritePropertyName("maxTokenLength"u8); - writer.WriteNumberValue(MaxTokenLength.Value); - } - if (Optional.IsDefined(ReverseTokenOrder)) - { - writer.WritePropertyName("reverse"u8); - writer.WriteBooleanValue(ReverseTokenOrder.Value); - } - if (Optional.IsDefined(NumberOfTokensToSkip)) - { - writer.WritePropertyName("skip"u8); - writer.WriteNumberValue(NumberOfTokensToSkip.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static PathHierarchyTokenizer DeserializePathHierarchyTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - char? delimiter = default; - char? replacement = default; - int? maxTokenLength = default; - bool? reverse = default; - int? skip = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("delimiter"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - delimiter = property.Value.GetChar(); - continue; - } - if (property.NameEquals("replacement"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - replacement = property.Value.GetChar(); - continue; - } - if (property.NameEquals("maxTokenLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTokenLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("reverse"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - reverse = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("skip"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - skip = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new PathHierarchyTokenizer( - odataType, - name, - delimiter, - replacement, - maxTokenLength, - reverse, - skip); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new PathHierarchyTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePathHierarchyTokenizer(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PathHierarchyTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PathHierarchyTokenizer.cs deleted file mode 100644 index d6a96eeccd99..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PathHierarchyTokenizer.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Tokenizer for path-like hierarchies. This tokenizer is implemented using Apache Lucene. - public partial class PathHierarchyTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public PathHierarchyTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.PathHierarchyTokenizerV2"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The delimiter character to use. Default is "/". - /// A value that, if set, replaces the delimiter character. Default is "/". - /// The maximum token length. Default and maximum is 300. - /// A value indicating whether to generate tokens in reverse order. Default is false. - /// The number of initial tokens to skip. Default is 0. - internal PathHierarchyTokenizer(string oDataType, string name, char? delimiter, char? replacement, int? maxTokenLength, bool? reverseTokenOrder, int? numberOfTokensToSkip) : base(oDataType, name) - { - Delimiter = delimiter; - Replacement = replacement; - MaxTokenLength = maxTokenLength; - ReverseTokenOrder = reverseTokenOrder; - NumberOfTokensToSkip = numberOfTokensToSkip; - ODataType = oDataType ?? "#Microsoft.Azure.Search.PathHierarchyTokenizerV2"; - } - - /// The delimiter character to use. Default is "/". - public char? Delimiter { get; set; } - /// A value that, if set, replaces the delimiter character. Default is "/". - public char? Replacement { get; set; } - /// The maximum token length. Default and maximum is 300. - public int? MaxTokenLength { get; set; } - /// A value indicating whether to generate tokens in reverse order. Default is false. - public bool? ReverseTokenOrder { get; set; } - /// The number of initial tokens to skip. Default is 0. - public int? NumberOfTokensToSkip { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternAnalyzer.Serialization.cs deleted file mode 100644 index f88774ce9fe2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternAnalyzer.Serialization.cs +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
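The PathHierarchyTokenizer members deleted above map one-to-one onto settable properties. A minimal sketch showing the documented defaults spelled out explicitly:

using Azure.Search.Documents.Indexes.Models;

var pathTokenizer = new PathHierarchyTokenizer("blob-path-tokenizer")
{
    Delimiter = '/',            // default
    Replacement = '/',          // default
    MaxTokenLength = 300,       // default and maximum
    ReverseTokenOrder = false,  // default
    NumberOfTokensToSkip = 0    // default
};
// With these settings "/a/b/c.txt" yields roughly "/a", "/a/b", "/a/b/c.txt".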
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PatternAnalyzer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(LowerCaseTerms)) - { - writer.WritePropertyName("lowercase"u8); - writer.WriteBooleanValue(LowerCaseTerms.Value); - } - if (Optional.IsDefined(Pattern)) - { - writer.WritePropertyName("pattern"u8); - writer.WriteStringValue(Pattern); - } - if (Optional.IsDefined(FlagsInternal)) - { - writer.WritePropertyName("flags"u8); - writer.WriteStringValue(FlagsInternal); - } - if (Optional.IsCollectionDefined(Stopwords)) - { - writer.WritePropertyName("stopwords"u8); - writer.WriteStartArray(); - foreach (var item in Stopwords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static PatternAnalyzer DeserializePatternAnalyzer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - bool? lowercase = default; - string pattern = default; - string flags = default; - IList stopwords = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("lowercase"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - lowercase = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("pattern"u8)) - { - pattern = property.Value.GetString(); - continue; - } - if (property.NameEquals("flags"u8)) - { - flags = property.Value.GetString(); - continue; - } - if (property.NameEquals("stopwords"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - stopwords = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new PatternAnalyzer( - odataType, - name, - lowercase, - pattern, - flags, - stopwords ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new PatternAnalyzer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePatternAnalyzer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternAnalyzer.cs deleted file mode 100644 index 39653249ef7f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternAnalyzer.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Flexibly separates text into terms via a regular expression pattern. This analyzer is implemented using Apache Lucene. - public partial class PatternAnalyzer : LexicalAnalyzer - { - /// Initializes a new instance of . - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public PatternAnalyzer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Stopwords = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.PatternAnalyzer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of analyzer. - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A value indicating whether terms should be lower-cased. Default is true. - /// A regular expression pattern to match token separators. Default is an expression that matches one or more non-word characters. - /// Regular expression flags. - /// A list of stopwords. - internal PatternAnalyzer(string oDataType, string name, bool? lowerCaseTerms, string pattern, string flagsInternal, IList stopwords) : base(oDataType, name) - { - LowerCaseTerms = lowerCaseTerms; - Pattern = pattern; - FlagsInternal = flagsInternal; - Stopwords = stopwords; - ODataType = oDataType ?? "#Microsoft.Azure.Search.PatternAnalyzer"; - } - - /// A value indicating whether terms should be lower-cased. Default is true. - public bool? LowerCaseTerms { get; set; } - /// A regular expression pattern to match token separators. Default is an expression that matches one or more non-word characters. - public string Pattern { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternCaptureTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternCaptureTokenFilter.Serialization.cs deleted file mode 100644 index 3d08f2e5b6ea..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternCaptureTokenFilter.Serialization.cs +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PatternCaptureTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("patterns"u8); - writer.WriteStartArray(); - foreach (var item in Patterns) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(PreserveOriginal)) - { - writer.WritePropertyName("preserveOriginal"u8); - writer.WriteBooleanValue(PreserveOriginal.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static PatternCaptureTokenFilter DeserializePatternCaptureTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList patterns = default; - bool? 
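PatternAnalyzer splits on a regular expression instead of composing a tokenizer and filters by hand. A sketch of the removed public surface, assuming the SearchIndex model's Analyzers collection (outside this diff):

using Azure.Search.Documents.Indexes.Models;

var csvAnalyzer = new PatternAnalyzer("comma-separated")
{
    Pattern = @",\s*",    // the pattern matches token separators, not the tokens themselves
    LowerCaseTerms = true // default is true
};
csvAnalyzer.Stopwords.Add("n/a");

var index = new SearchIndex("catalog");
index.Analyzers.Add(csvAnalyzer);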
preserveOriginal = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("patterns"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - patterns = array; - continue; - } - if (property.NameEquals("preserveOriginal"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - preserveOriginal = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new PatternCaptureTokenFilter(odataType, name, patterns, preserveOriginal); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new PatternCaptureTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePatternCaptureTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternCaptureTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternCaptureTokenFilter.cs deleted file mode 100644 index c1e82ef1c9e0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternCaptureTokenFilter.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Uses Java regexes to emit multiple tokens - one for each capture group in one or more patterns. This token filter is implemented using Apache Lucene. - public partial class PatternCaptureTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of patterns to match against each token. - /// or is null. - public PatternCaptureTokenFilter(string name, IEnumerable patterns) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(patterns, nameof(patterns)); - - Patterns = patterns.ToList(); - ODataType = "#Microsoft.Azure.Search.PatternCaptureTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of patterns to match against each token. - /// A value indicating whether to return the original token even if one of the patterns matches. Default is true. - internal PatternCaptureTokenFilter(string oDataType, string name, IList patterns, bool? preserveOriginal) : base(oDataType, name) - { - Patterns = patterns; - PreserveOriginal = preserveOriginal; - ODataType = oDataType ?? 
"#Microsoft.Azure.Search.PatternCaptureTokenFilter"; - } - /// A value indicating whether to return the original token even if one of the patterns matches. Default is true. - public bool? PreserveOriginal { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceCharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceCharFilter.Serialization.cs deleted file mode 100644 index 122dcaed6eed..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceCharFilter.Serialization.cs +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PatternReplaceCharFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("pattern"u8); - writer.WriteStringValue(Pattern); - writer.WritePropertyName("replacement"u8); - writer.WriteStringValue(Replacement); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static PatternReplaceCharFilter DeserializePatternReplaceCharFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string pattern = default; - string replacement = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("pattern"u8)) - { - pattern = property.Value.GetString(); - continue; - } - if (property.NameEquals("replacement"u8)) - { - replacement = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new PatternReplaceCharFilter(odataType, name, pattern, replacement); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new PatternReplaceCharFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePatternReplaceCharFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceCharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceCharFilter.cs deleted file mode 100644 index 4a5761057e6c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceCharFilter.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A character filter that replaces characters in the input string. It uses a regular expression to identify character sequences to preserve and a replacement pattern to identify characters to replace. 
For example, given the input text "aa bb aa bb", pattern "(aa)\s+(bb)", and replacement "$1#$2", the result would be "aa#bb aa#bb". This character filter is implemented using Apache Lucene. - public partial class PatternReplaceCharFilter : CharFilter - { - /// Initializes a new instance of . - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A regular expression pattern. - /// The replacement text. - /// , or is null. - public PatternReplaceCharFilter(string name, string pattern, string replacement) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(pattern, nameof(pattern)); - Argument.AssertNotNull(replacement, nameof(replacement)); - - Pattern = pattern; - Replacement = replacement; - ODataType = "#Microsoft.Azure.Search.PatternReplaceCharFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of char filter. - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A regular expression pattern. - /// The replacement text. - internal PatternReplaceCharFilter(string oDataType, string name, string pattern, string replacement) : base(oDataType, name) - { - Pattern = pattern; - Replacement = replacement; - ODataType = oDataType ?? "#Microsoft.Azure.Search.PatternReplaceCharFilter"; - } - - /// A regular expression pattern. - public string Pattern { get; set; } - /// The replacement text. - public string Replacement { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceTokenFilter.Serialization.cs deleted file mode 100644 index fb280d321229..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceTokenFilter.Serialization.cs +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
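The "aa bb aa bb" example from the char filter's own summary, reproduced as code; the SearchIndex.CharFilters collection is assumed from the public surface outside this diff:

using Azure.Search.Documents.Indexes.Models;

// Per the summary above: input "aa bb aa bb" becomes "aa#bb aa#bb"
// before tokenization runs.
var joinPairs = new PatternReplaceCharFilter(
    name: "join-aa-bb",
    pattern: @"(aa)\s+(bb)",
    replacement: "$1#$2");

var index = new SearchIndex("demo");
index.CharFilters.Add(joinPairs);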
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PatternReplaceTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("pattern"u8); - writer.WriteStringValue(Pattern); - writer.WritePropertyName("replacement"u8); - writer.WriteStringValue(Replacement); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static PatternReplaceTokenFilter DeserializePatternReplaceTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string pattern = default; - string replacement = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("pattern"u8)) - { - pattern = property.Value.GetString(); - continue; - } - if (property.NameEquals("replacement"u8)) - { - replacement = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new PatternReplaceTokenFilter(odataType, name, pattern, replacement); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new PatternReplaceTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePatternReplaceTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceTokenFilter.cs deleted file mode 100644 index a0c709b8ad22..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternReplaceTokenFilter.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A character filter that replaces characters in the input string. It uses a regular expression to identify character sequences to preserve and a replacement pattern to identify characters to replace. For example, given the input text "aa bb aa bb", pattern "(aa)\s+(bb)", and replacement "$1#$2", the result would be "aa#bb aa#bb". This token filter is implemented using Apache Lucene. - public partial class PatternReplaceTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A regular expression pattern. - /// The replacement text. - /// , or is null. 
- public PatternReplaceTokenFilter(string name, string pattern, string replacement) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(pattern, nameof(pattern)); - Argument.AssertNotNull(replacement, nameof(replacement)); - - Pattern = pattern; - Replacement = replacement; - ODataType = "#Microsoft.Azure.Search.PatternReplaceTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A regular expression pattern. - /// The replacement text. - internal PatternReplaceTokenFilter(string oDataType, string name, string pattern, string replacement) : base(oDataType, name) - { - Pattern = pattern; - Replacement = replacement; - ODataType = oDataType ?? "#Microsoft.Azure.Search.PatternReplaceTokenFilter"; - } - - /// A regular expression pattern. - public string Pattern { get; set; } - /// The replacement text. - public string Replacement { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternTokenizer.Serialization.cs deleted file mode 100644 index 61adbe5f4725..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternTokenizer.Serialization.cs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PatternTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Pattern)) - { - writer.WritePropertyName("pattern"u8); - writer.WriteStringValue(Pattern); - } - if (Optional.IsDefined(FlagsInternal)) - { - writer.WritePropertyName("flags"u8); - writer.WriteStringValue(FlagsInternal); - } - if (Optional.IsDefined(Group)) - { - writer.WritePropertyName("group"u8); - writer.WriteNumberValue(Group.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static PatternTokenizer DeserializePatternTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string pattern = default; - string flags = default; - int? group = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("pattern"u8)) - { - pattern = property.Value.GetString(); - continue; - } - if (property.NameEquals("flags"u8)) - { - flags = property.Value.GetString(); - continue; - } - if (property.NameEquals("group"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - group = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new PatternTokenizer(odataType, name, pattern, flags, group); - } - - /// Deserializes the model from a raw response. 
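PatternReplaceTokenFilter has the same three-argument shape as the char filter, but it rewrites individual tokens after tokenization rather than the raw input stream. A sketch with an illustrative pattern:

using Azure.Search.Documents.Indexes.Models;

// Strips a leading "v" from version-like tokens: "v1.2.3" -> "1.2.3".
var stripVersionPrefix = new PatternReplaceTokenFilter(
    name: "strip-v-prefix",
    pattern: @"^v(\d+(\.\d+)*)$",
    replacement: "$1");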
- /// The response to deserialize the model from. - internal static new PatternTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePatternTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternTokenizer.cs deleted file mode 100644 index 80b07f3896c4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PatternTokenizer.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Tokenizer that uses regex pattern matching to construct distinct tokens. This tokenizer is implemented using Apache Lucene. - public partial class PatternTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public PatternTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.PatternTokenizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A regular expression pattern to match token separators. Default is an expression that matches one or more non-word characters. - /// Regular expression flags. - /// The zero-based ordinal of the matching group in the regular expression pattern to extract into tokens. Use -1 if you want to use the entire pattern to split the input into tokens, irrespective of matching groups. Default is -1. - internal PatternTokenizer(string oDataType, string name, string pattern, string flagsInternal, int? group) : base(oDataType, name) - { - Pattern = pattern; - FlagsInternal = flagsInternal; - Group = group; - ODataType = oDataType ?? "#Microsoft.Azure.Search.PatternTokenizer"; - } - - /// A regular expression pattern to match token separators. Default is an expression that matches one or more non-word characters. - public string Pattern { get; set; } - /// The zero-based ordinal of the matching group in the regular expression pattern to extract into tokens. Use -1 if you want to use the entire pattern to split the input into tokens, irrespective of matching groups. Default is -1. - public int? Group { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticEncoder.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticEncoder.Serialization.cs deleted file mode 100644 index 36b8215fd6e2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticEncoder.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class PhoneticEncoderExtensions - { - public static string ToSerialString(this PhoneticEncoder value) => value switch - { - PhoneticEncoder.Metaphone => "metaphone", - PhoneticEncoder.DoubleMetaphone => "doubleMetaphone", - PhoneticEncoder.Soundex => "soundex", - PhoneticEncoder.RefinedSoundex => "refinedSoundex", - PhoneticEncoder.Caverphone1 => "caverphone1", - PhoneticEncoder.Caverphone2 => "caverphone2", - PhoneticEncoder.Cologne => "cologne", - PhoneticEncoder.Nysiis => "nysiis", - PhoneticEncoder.KoelnerPhonetik => "koelnerPhonetik", - PhoneticEncoder.HaasePhonetik => "haasePhonetik", - PhoneticEncoder.BeiderMorse => "beiderMorse", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown PhoneticEncoder value.") - }; - - public static PhoneticEncoder ToPhoneticEncoder(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "metaphone")) return PhoneticEncoder.Metaphone; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "doubleMetaphone")) return PhoneticEncoder.DoubleMetaphone; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "soundex")) return PhoneticEncoder.Soundex; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "refinedSoundex")) return PhoneticEncoder.RefinedSoundex; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "caverphone1")) return PhoneticEncoder.Caverphone1; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "caverphone2")) return PhoneticEncoder.Caverphone2; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "cologne")) return PhoneticEncoder.Cologne; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "nysiis")) return PhoneticEncoder.Nysiis; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "koelnerPhonetik")) return PhoneticEncoder.KoelnerPhonetik; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "haasePhonetik")) return PhoneticEncoder.HaasePhonetik; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "beiderMorse")) return PhoneticEncoder.BeiderMorse; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown PhoneticEncoder value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticEncoder.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticEncoder.cs deleted file mode 100644 index d09c234a99ea..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticEncoder.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Identifies the type of phonetic encoder to use with a PhoneticTokenFilter. - public enum PhoneticEncoder - { - /// Encodes a token into a Metaphone value. - Metaphone, - /// Encodes a token into a double metaphone value. - DoubleMetaphone, - /// Encodes a token into a Soundex value. - Soundex, - /// Encodes a token into a Refined Soundex value. - RefinedSoundex, - /// Encodes a token into a Caverphone 1.0 value. - Caverphone1, - /// Encodes a token into a Caverphone 2.0 value. - Caverphone2, - /// Encodes a token into a Cologne Phonetic value. - Cologne, - /// Encodes a token into a NYSIIS value. - Nysiis, - /// Encodes a token using the Kölner Phonetik algorithm. - KoelnerPhonetik, - /// Encodes a token using the Haase refinement of the Kölner Phonetik algorithm. 
- HaasePhonetik, - /// Encodes a token into a Beider-Morse value. - BeiderMorse - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticTokenFilter.Serialization.cs deleted file mode 100644 index b949f979fd35..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticTokenFilter.Serialization.cs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PhoneticTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Encoder)) - { - writer.WritePropertyName("encoder"u8); - writer.WriteStringValue(Encoder.Value.ToSerialString()); - } - if (Optional.IsDefined(ReplaceOriginalTokens)) - { - writer.WritePropertyName("replace"u8); - writer.WriteBooleanValue(ReplaceOriginalTokens.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static PhoneticTokenFilter DeserializePhoneticTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - PhoneticEncoder? encoder = default; - bool? replace = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("encoder"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - encoder = property.Value.GetString().ToPhoneticEncoder(); - continue; - } - if (property.NameEquals("replace"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - replace = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new PhoneticTokenFilter(odataType, name, encoder, replace); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new PhoneticTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePhoneticTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticTokenFilter.cs deleted file mode 100644 index be1d1186619e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PhoneticTokenFilter.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Create tokens for phonetic matches. This token filter is implemented using Apache Lucene. 
- public partial class PhoneticTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public PhoneticTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.PhoneticTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The phonetic encoder to use. Default is "metaphone". - /// A value indicating whether encoded tokens should replace original tokens. If false, encoded tokens are added as synonyms. Default is true. - internal PhoneticTokenFilter(string oDataType, string name, PhoneticEncoder? encoder, bool? replaceOriginalTokens) : base(oDataType, name) - { - Encoder = encoder; - ReplaceOriginalTokens = replaceOriginalTokens; - ODataType = oDataType ?? "#Microsoft.Azure.Search.PhoneticTokenFilter"; - } - - /// The phonetic encoder to use. Default is "metaphone". - public PhoneticEncoder? Encoder { get; set; } - /// A value indicating whether encoded tokens should replace original tokens. If false, encoded tokens are added as synonyms. Default is true. - public bool? ReplaceOriginalTokens { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkill.Serialization.cs deleted file mode 100644 index f8588dd11a72..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkill.Serialization.cs +++ /dev/null @@ -1,295 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class PiiDetectionSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(MinPrecision)) - { - if (MinPrecision != null) - { - writer.WritePropertyName("minimumPrecision"u8); - writer.WriteNumberValue(MinPrecision.Value); - } - else - { - writer.WriteNull("minimumPrecision"); - } - } - if (Optional.IsDefined(MaskingMode)) - { - writer.WritePropertyName("maskingMode"u8); - writer.WriteStringValue(MaskingMode.Value.ToString()); - } - if (Optional.IsDefined(Mask)) - { - if (Mask != null) - { - writer.WritePropertyName("maskingCharacter"u8); - writer.WriteStringValue(Mask); - } - else - { - writer.WriteNull("maskingCharacter"); - } - } - if (Optional.IsDefined(ModelVersion)) - { - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - } - if (Optional.IsCollectionDefined(PiiCategories)) - { - writer.WritePropertyName("piiCategories"u8); - writer.WriteStartArray(); - foreach (var item in PiiCategories) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(Domain)) - { - if (Domain != null) - { - writer.WritePropertyName("domain"u8); - writer.WriteStringValue(Domain); - } - else - { - writer.WriteNull("domain"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static PiiDetectionSkill DeserializePiiDetectionSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string defaultLanguageCode = default; - double? minimumPrecision = default; - PiiDetectionSkillMaskingMode? 
maskingMode = default; - string maskingCharacter = default; - string modelVersion = default; - IList piiCategories = default; - string domain = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = property.Value.GetString(); - continue; - } - if (property.NameEquals("minimumPrecision"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - minimumPrecision = null; - continue; - } - minimumPrecision = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("maskingMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maskingMode = new PiiDetectionSkillMaskingMode(property.Value.GetString()); - continue; - } - if (property.NameEquals("maskingCharacter"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maskingCharacter = null; - continue; - } - maskingCharacter = property.Value.GetString(); - continue; - } - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("piiCategories"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - piiCategories = array; - continue; - } - if (property.NameEquals("domain"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - domain = null; - continue; - } - domain = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new PiiDetectionSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - minimumPrecision, - maskingMode, - maskingCharacter, - modelVersion, - piiCategories ?? new ChangeTrackingList(), - domain); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new PiiDetectionSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializePiiDetectionSkill(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkill.cs deleted file mode 100644 index 3062a265e535..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkill.cs +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Using the Text Analytics API, extracts personal information from an input text and gives you the option of masking it. - public partial class PiiDetectionSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public PiiDetectionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - PiiCategories = new ChangeTrackingList(); - ODataType = "#Microsoft.Skills.Text.PIIDetectionSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - /// A parameter that provides various ways to mask the personal information detected in the input text. Default is 'none'. - /// The character used to mask the text if the maskingMode parameter is set to replace. Default is '*'. - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - /// A list of PII entity categories that should be extracted and masked. - /// If specified, will set the PII domain to include only a subset of the entity categories. Possible values include: 'phi', 'none'. Default is 'none'. - internal PiiDetectionSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, string defaultLanguageCode, double? minPrecision, PiiDetectionSkillMaskingMode? 
maskingMode, string mask, string modelVersion, IList piiCategories, string domain) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - MinPrecision = minPrecision; - MaskingMode = maskingMode; - Mask = mask; - ModelVersion = modelVersion; - PiiCategories = piiCategories; - Domain = domain; - ODataType = oDataType ?? "#Microsoft.Skills.Text.PIIDetectionSkill"; - } - /// A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. - public double? MinPrecision { get; set; } - /// A parameter that provides various ways to mask the personal information detected in the input text. Default is 'none'. - public PiiDetectionSkillMaskingMode? MaskingMode { get; set; } - /// The character used to mask the text if the maskingMode parameter is set to replace. Default is '*'. - public string Mask { get; set; } - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - public string ModelVersion { get; set; } - /// A list of PII entity categories that should be extracted and masked. - public IList PiiCategories { get; } - /// If specified, will set the PII domain to include only a subset of the entity categories. Possible values include: 'phi', 'none'. Default is 'none'. - public string Domain { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkillMaskingMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkillMaskingMode.cs deleted file mode 100644 index 13086c3b6214..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/PiiDetectionSkillMaskingMode.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A string indicating what maskingMode to use to mask the personal information detected in the input text. - public readonly partial struct PiiDetectionSkillMaskingMode : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public PiiDetectionSkillMaskingMode(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string NoneValue = "none"; - private const string ReplaceValue = "replace"; - - /// No masking occurs and the maskedText output will not be returned. - public static PiiDetectionSkillMaskingMode None { get; } = new PiiDetectionSkillMaskingMode(NoneValue); - /// Replaces the detected entities with the character given in the maskingCharacter parameter. The character will be repeated to the length of the detected entity so that the offsets will correctly correspond to both the input text as well as the output maskedText. - public static PiiDetectionSkillMaskingMode Replace { get; } = new PiiDetectionSkillMaskingMode(ReplaceValue); - /// Determines if two values are the same. - public static bool operator ==(PiiDetectionSkillMaskingMode left, PiiDetectionSkillMaskingMode right) => left.Equals(right); - /// Determines if two values are not the same. 
- public static bool operator !=(PiiDetectionSkillMaskingMode left, PiiDetectionSkillMaskingMode right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator PiiDetectionSkillMaskingMode(string value) => new PiiDetectionSkillMaskingMode(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is PiiDetectionSkillMaskingMode other && Equals(other); - /// - public bool Equals(PiiDetectionSkillMaskingMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerResult.Serialization.cs deleted file mode 100644 index 718d0db7a9cb..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerResult.Serialization.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class QueryAnswerResult - { - internal static QueryAnswerResult DeserializeQueryAnswerResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double? score = default; - string key = default; - string text = default; - string highlights = default; - IReadOnlyDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("score"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - score = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("key"u8)) - { - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("text"u8)) - { - text = property.Value.GetString(); - continue; - } - if (property.NameEquals("highlights"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - highlights = null; - continue; - } - highlights = property.Value.GetString(); - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new QueryAnswerResult(score, key, text, highlights, additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static QueryAnswerResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeQueryAnswerResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerResult.cs deleted file mode 100644 index c874dd1ecb03..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerResult.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
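Illustrative sketch (not part of the diff): the PiiDetectionSkill model being deleted above is the public type in Azure.Search.Documents.Indexes.Models, and a short usage example may help readers reviewing its removal. The sketch assumes only the constructor and properties shown in the deleted code; the input path, output names, and category value are placeholders.

// Sketch only: configure a PII detection skill that masks detected entities with '*'.
// Assumes the PiiDetectionSkill members shown in the deleted model above; paths and names are placeholders.
using Azure.Search.Documents.Indexes.Models;

var piiSkill = new PiiDetectionSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("maskedText") { TargetName = "maskedContent" } })
{
    MinPrecision = 0.5,                                   // keep only entities with confidence above 0.5
    MaskingMode = PiiDetectionSkillMaskingMode.Replace,   // replace detected entities in the maskedText output
    Mask = "*",                                           // character used for replacement
    Domain = "none"
};
piiSkill.PiiCategories.Add("Person");                     // optionally restrict extraction to specific categories

A skill configured this way would normally be added to a SearchIndexerSkillset's Skills collection.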
- -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// An answer is a text passage extracted from the contents of the most relevant documents that matched the query. Answers are extracted from the top search results. Answer candidates are scored and the top answers are selected. - public partial class QueryAnswerResult - { - /// Initializes a new instance of . - internal QueryAnswerResult() - { - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// The score value represents how relevant the answer is to the query relative to other answers returned for the query. - /// The key of the document the answer was extracted from. - /// The text passage extracted from the document contents as the answer. - /// Same text passage as in the Text property with highlighted text phrases most relevant to the query. - /// Additional Properties. - internal QueryAnswerResult(double? score, string key, string text, string highlights, IReadOnlyDictionary additionalProperties) - { - Score = score; - Key = key; - Text = text; - Highlights = highlights; - AdditionalProperties = additionalProperties; - } - - /// The score value represents how relevant the answer is to the query relative to other answers returned for the query. - public double? Score { get; } - /// The key of the document the answer was extracted from. - public string Key { get; } - /// The text passage extracted from the document contents as the answer. - public string Text { get; } - /// Same text passage as in the Text property with highlighted text phrases most relevant to the query. - public string Highlights { get; } - /// Additional Properties. - public IReadOnlyDictionary AdditionalProperties { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerType.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerType.cs deleted file mode 100644 index 814f115eedbf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryAnswerType.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Models -{ - /// This parameter is only valid if the query type is `semantic`. If set, the query returns answers extracted from key passages in the highest ranked documents. The number of answers returned can be configured by appending the pipe character `|` followed by the `count-<number of answers>` option after the answers parameter value, such as `extractive|count-3`. Default count is 1. The confidence threshold can be configured by appending the pipe character `|` followed by the `threshold-<confidence threshold>` option after the answers parameter value, such as `extractive|threshold-0.9`. Default threshold is 0.7. The maximum character length of answers can be configured by appending the pipe character '|' followed by the 'count-<number of maximum character length>', such as 'extractive|maxcharlength-600'. - public readonly partial struct QueryAnswerType : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public QueryAnswerType(string value) - { - _value = value ?? 
throw new ArgumentNullException(nameof(value)); - } - - private const string NoneValue = "none"; - private const string ExtractiveValue = "extractive"; - - /// Do not return answers for the query. - public static QueryAnswerType None { get; } = new QueryAnswerType(NoneValue); - /// Extracts answer candidates from the contents of the documents returned in response to a query expressed as a question in natural language. - public static QueryAnswerType Extractive { get; } = new QueryAnswerType(ExtractiveValue); - /// Determines if two values are the same. - public static bool operator ==(QueryAnswerType left, QueryAnswerType right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(QueryAnswerType left, QueryAnswerType right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator QueryAnswerType(string value) => new QueryAnswerType(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is QueryAnswerType other && Equals(other); - /// - public bool Equals(QueryAnswerType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionResult.Serialization.cs deleted file mode 100644 index 8d3a2ec4053c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionResult.Serialization.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class QueryCaptionResult - { - internal static QueryCaptionResult DeserializeQueryCaptionResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string text = default; - string highlights = default; - IReadOnlyDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("text"u8)) - { - text = property.Value.GetString(); - continue; - } - if (property.NameEquals("highlights"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - highlights = null; - continue; - } - highlights = property.Value.GetString(); - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new QueryCaptionResult(text, highlights, additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static QueryCaptionResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeQueryCaptionResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionResult.cs deleted file mode 100644 index e87f89ad78b4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionResult.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// Captions are the most representative passages from the document relatively to the search query. They are often used as document summary. Captions are only returned for queries of type `semantic`. - public partial class QueryCaptionResult - { - /// Initializes a new instance of . - internal QueryCaptionResult() - { - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// A representative text passage extracted from the document most relevant to the search query. - /// Same text passage as in the Text property with highlighted phrases most relevant to the query. - /// Additional Properties. - internal QueryCaptionResult(string text, string highlights, IReadOnlyDictionary additionalProperties) - { - Text = text; - Highlights = highlights; - AdditionalProperties = additionalProperties; - } - - /// A representative text passage extracted from the document most relevant to the search query. - public string Text { get; } - /// Same text passage as in the Text property with highlighted phrases most relevant to the query. - public string Highlights { get; } - /// Additional Properties. - public IReadOnlyDictionary AdditionalProperties { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionType.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionType.cs deleted file mode 100644 index f15e06988591..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryCaptionType.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Models -{ - /// This parameter is only valid if the query type is `semantic`. If set, the query returns captions extracted from key passages in the highest ranked documents. When Captions is set to `extractive`, highlighting is enabled by default, and can be configured by appending the pipe character `|` followed by the `highlight-<true/false>` option, such as `extractive|highlight-true`. Defaults to `None`. The maximum character length of captions can be configured by appending the pipe character '|' followed by the 'count-<number of maximum character length>', such as 'extractive|maxcharlength-600'. - public readonly partial struct QueryCaptionType : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public QueryCaptionType(string value) - { - _value = value ?? 
throw new ArgumentNullException(nameof(value)); - } - - private const string NoneValue = "none"; - private const string ExtractiveValue = "extractive"; - - /// Do not return captions for the query. - public static QueryCaptionType None { get; } = new QueryCaptionType(NoneValue); - /// Extracts captions from the matching documents that contain passages relevant to the search query. - public static QueryCaptionType Extractive { get; } = new QueryCaptionType(ExtractiveValue); - /// Determines if two values are the same. - public static bool operator ==(QueryCaptionType left, QueryCaptionType right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(QueryCaptionType left, QueryCaptionType right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator QueryCaptionType(string value) => new QueryCaptionType(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is QueryCaptionType other && Equals(other); - /// - public bool Equals(QueryCaptionType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentRerankerInput.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentRerankerInput.Serialization.cs deleted file mode 100644 index 4843d26ac77a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentRerankerInput.Serialization.cs +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class QueryResultDocumentRerankerInput - { - internal static QueryResultDocumentRerankerInput DeserializeQueryResultDocumentRerankerInput(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string title = default; - string content = default; - string keywords = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("title"u8)) - { - title = property.Value.GetString(); - continue; - } - if (property.NameEquals("content"u8)) - { - content = property.Value.GetString(); - continue; - } - if (property.NameEquals("keywords"u8)) - { - keywords = property.Value.GetString(); - continue; - } - } - return new QueryResultDocumentRerankerInput(title, content, keywords); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static QueryResultDocumentRerankerInput FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeQueryResultDocumentRerankerInput(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentRerankerInput.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentRerankerInput.cs deleted file mode 100644 index 2980704e7d3c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentRerankerInput.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The raw concatenated strings that were sent to the semantic enrichment process. - public partial class QueryResultDocumentRerankerInput - { - /// Initializes a new instance of . - internal QueryResultDocumentRerankerInput() - { - } - - /// Initializes a new instance of . - /// The raw string for the title field that was used for semantic enrichment. - /// The raw concatenated strings for the content fields that were used for semantic enrichment. - /// The raw concatenated strings for the keyword fields that were used for semantic enrichment. - internal QueryResultDocumentRerankerInput(string title, string content, string keywords) - { - Title = title; - Content = content; - Keywords = keywords; - } - - /// The raw string for the title field that was used for semantic enrichment. - public string Title { get; } - /// The raw concatenated strings for the content fields that were used for semantic enrichment. - public string Content { get; } - /// The raw concatenated strings for the keyword fields that were used for semantic enrichment. - public string Keywords { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSemanticField.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSemanticField.Serialization.cs deleted file mode 100644 index 566cb4b3fc78..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSemanticField.Serialization.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class QueryResultDocumentSemanticField - { - internal static QueryResultDocumentSemanticField DeserializeQueryResultDocumentSemanticField(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - SemanticFieldState? state = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("state"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - state = new SemanticFieldState(property.Value.GetString()); - continue; - } - } - return new QueryResultDocumentSemanticField(name, state); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static QueryResultDocumentSemanticField FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeQueryResultDocumentSemanticField(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSemanticField.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSemanticField.cs deleted file mode 100644 index 6d05204a9fc2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSemanticField.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// Description of fields that were sent to the semantic enrichment process, as well as how they were used. - public partial class QueryResultDocumentSemanticField - { - /// Initializes a new instance of . - internal QueryResultDocumentSemanticField() - { - } - - /// Initializes a new instance of . - /// The name of the field that was sent to the semantic enrichment process. - /// The way the field was used for the semantic enrichment process (fully used, partially used, or unused). - internal QueryResultDocumentSemanticField(string name, SemanticFieldState? state) - { - Name = name; - State = state; - } - - /// The name of the field that was sent to the semantic enrichment process. - public string Name { get; } - /// The way the field was used for the semantic enrichment process (fully used, partially used, or unused). - public SemanticFieldState? State { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSubscores.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSubscores.Serialization.cs deleted file mode 100644 index 000d2cd0471c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSubscores.Serialization.cs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class QueryResultDocumentSubscores - { - internal static QueryResultDocumentSubscores DeserializeQueryResultDocumentSubscores(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - TextResult text = default; - IReadOnlyList> vectors = default; - double? 
documentBoost = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("text"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - text = TextResult.DeserializeTextResult(property.Value); - continue; - } - if (property.NameEquals("vectors"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List> array = new List>(); - foreach (var item in property.Value.EnumerateArray()) - { - if (item.ValueKind == JsonValueKind.Null) - { - array.Add(null); - } - else - { - Dictionary dictionary = new Dictionary(); - foreach (var property0 in item.EnumerateObject()) - { - dictionary.Add(property0.Name, SingleVectorFieldResult.DeserializeSingleVectorFieldResult(property0.Value)); - } - array.Add(dictionary); - } - } - vectors = array; - continue; - } - if (property.NameEquals("documentBoost"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - documentBoost = property.Value.GetDouble(); - continue; - } - } - return new QueryResultDocumentSubscores(text, vectors ?? new ChangeTrackingList>(), documentBoost); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static QueryResultDocumentSubscores FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeQueryResultDocumentSubscores(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSubscores.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSubscores.cs deleted file mode 100644 index 6b4cbd764cb4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryResultDocumentSubscores.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// The breakdown of subscores between the text and vector query components of the search query for this document. Each vector query is shown as a separate object in the same order they were received. - public partial class QueryResultDocumentSubscores - { - /// Initializes a new instance of . - internal QueryResultDocumentSubscores() - { - Vectors = new ChangeTrackingList>(); - } - - /// Initializes a new instance of . - /// The BM25 or Classic score for the text portion of the query. - /// The vector similarity and @search.score values for each vector query. - /// The BM25 or Classic score for the text portion of the query. - internal QueryResultDocumentSubscores(TextResult text, IReadOnlyList> vectors, double? documentBoost) - { - Text = text; - Vectors = vectors; - DocumentBoost = documentBoost; - } - - /// The BM25 or Classic score for the text portion of the query. - public TextResult Text { get; } - /// The vector similarity and @search.score values for each vector query. - public IReadOnlyList> Vectors { get; } - /// The BM25 or Classic score for the text portion of the query. - public double? 
DocumentBoost { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesDebugInfo.Serialization.cs deleted file mode 100644 index 35aaca3c1312..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesDebugInfo.Serialization.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class QueryRewritesDebugInfo - { - internal static QueryRewritesDebugInfo DeserializeQueryRewritesDebugInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - QueryRewritesValuesDebugInfo text = default; - IReadOnlyList vectors = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("text"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - text = QueryRewritesValuesDebugInfo.DeserializeQueryRewritesValuesDebugInfo(property.Value); - continue; - } - if (property.NameEquals("vectors"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(QueryRewritesValuesDebugInfo.DeserializeQueryRewritesValuesDebugInfo(item)); - } - vectors = array; - continue; - } - } - return new QueryRewritesDebugInfo(text, vectors ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static QueryRewritesDebugInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeQueryRewritesDebugInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesDebugInfo.cs deleted file mode 100644 index 29f0f869191e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesDebugInfo.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// Contains debugging information specific to query rewrites. - public partial class QueryRewritesDebugInfo - { - /// Initializes a new instance of . - internal QueryRewritesDebugInfo() - { - Vectors = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// List of query rewrites generated for the text query. - /// List of query rewrites generated for the vectorizable text queries. - internal QueryRewritesDebugInfo(QueryRewritesValuesDebugInfo text, IReadOnlyList vectors) - { - Text = text; - Vectors = vectors; - } - - /// List of query rewrites generated for the text query. - public QueryRewritesValuesDebugInfo Text { get; } - /// List of query rewrites generated for the vectorizable text queries. 
- public IReadOnlyList Vectors { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesValuesDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesValuesDebugInfo.Serialization.cs deleted file mode 100644 index abb1edb64f43..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesValuesDebugInfo.Serialization.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class QueryRewritesValuesDebugInfo - { - internal static QueryRewritesValuesDebugInfo DeserializeQueryRewritesValuesDebugInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string inputQuery = default; - IReadOnlyList rewrites = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("inputQuery"u8)) - { - inputQuery = property.Value.GetString(); - continue; - } - if (property.NameEquals("rewrites"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - rewrites = array; - continue; - } - } - return new QueryRewritesValuesDebugInfo(inputQuery, rewrites ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static QueryRewritesValuesDebugInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeQueryRewritesValuesDebugInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesValuesDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesValuesDebugInfo.cs deleted file mode 100644 index 953d64ae6719..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesValuesDebugInfo.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// Contains debugging information specific to query rewrites. - public partial class QueryRewritesValuesDebugInfo - { - /// Initializes a new instance of . - internal QueryRewritesValuesDebugInfo() - { - Rewrites = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The input text to the generative query rewriting model. There may be cases where the user query and the input to the generative model are not identical. - /// List of query rewrites. - internal QueryRewritesValuesDebugInfo(string inputQuery, IReadOnlyList rewrites) - { - InputQuery = inputQuery; - Rewrites = rewrites; - } - - /// The input text to the generative query rewriting model. There may be cases where the user query and the input to the generative model are not identical. - public string InputQuery { get; } - /// List of query rewrites. 
- public IReadOnlyList Rewrites { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/RegexFlag.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/RegexFlag.cs deleted file mode 100644 index 66fe5fa46b8f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/RegexFlag.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines flags that can be combined to control how regular expressions are used in the pattern analyzer and pattern tokenizer. - public readonly partial struct RegexFlag : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public RegexFlag(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string CanonEqValue = "CANON_EQ"; - private const string CaseInsensitiveValue = "CASE_INSENSITIVE"; - private const string CommentsValue = "COMMENTS"; - private const string DotAllValue = "DOTALL"; - private const string LiteralValue = "LITERAL"; - private const string MultilineValue = "MULTILINE"; - private const string UnicodeCaseValue = "UNICODE_CASE"; - private const string UnixLinesValue = "UNIX_LINES"; - - /// Enables canonical equivalence. - public static RegexFlag CanonEq { get; } = new RegexFlag(CanonEqValue); - /// Enables case-insensitive matching. - public static RegexFlag CaseInsensitive { get; } = new RegexFlag(CaseInsensitiveValue); - /// Permits whitespace and comments in the pattern. - public static RegexFlag Comments { get; } = new RegexFlag(CommentsValue); - /// Enables dotall mode. - public static RegexFlag DotAll { get; } = new RegexFlag(DotAllValue); - /// Enables literal parsing of the pattern. - public static RegexFlag Literal { get; } = new RegexFlag(LiteralValue); - /// Enables multiline mode. - public static RegexFlag Multiline { get; } = new RegexFlag(MultilineValue); - /// Enables Unicode-aware case folding. - public static RegexFlag UnicodeCase { get; } = new RegexFlag(UnicodeCaseValue); - /// Enables Unix lines mode. - public static RegexFlag UnixLines { get; } = new RegexFlag(UnixLinesValue); - /// Determines if two values are the same. - public static bool operator ==(RegexFlag left, RegexFlag right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(RegexFlag left, RegexFlag right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator RegexFlag(string value) => new RegexFlag(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is RegexFlag other && Equals(other); - /// - public bool Equals(RegexFlag other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/RescoringOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/RescoringOptions.Serialization.cs deleted file mode 100644 index d6751c95b0f3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/RescoringOptions.Serialization.cs +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class RescoringOptions : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(EnableRescoring)) - { - if (EnableRescoring != null) - { - writer.WritePropertyName("enableRescoring"u8); - writer.WriteBooleanValue(EnableRescoring.Value); - } - else - { - writer.WriteNull("enableRescoring"); - } - } - if (Optional.IsDefined(DefaultOversampling)) - { - if (DefaultOversampling != null) - { - writer.WritePropertyName("defaultOversampling"u8); - writer.WriteNumberValue(DefaultOversampling.Value); - } - else - { - writer.WriteNull("defaultOversampling"); - } - } - if (Optional.IsDefined(RescoreStorageMethod)) - { - if (RescoreStorageMethod != null) - { - writer.WritePropertyName("rescoreStorageMethod"u8); - writer.WriteStringValue(RescoreStorageMethod.Value.ToString()); - } - else - { - writer.WriteNull("rescoreStorageMethod"); - } - } - writer.WriteEndObject(); - } - - internal static RescoringOptions DeserializeRescoringOptions(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - bool? enableRescoring = default; - double? defaultOversampling = default; - VectorSearchCompressionRescoreStorageMethod? rescoreStorageMethod = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("enableRescoring"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - enableRescoring = null; - continue; - } - enableRescoring = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("defaultOversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultOversampling = null; - continue; - } - defaultOversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("rescoreStorageMethod"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - rescoreStorageMethod = null; - continue; - } - rescoreStorageMethod = new VectorSearchCompressionRescoreStorageMethod(property.Value.GetString()); - continue; - } - } - return new RescoringOptions(enableRescoring, defaultOversampling, rescoreStorageMethod); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static RescoringOptions FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeRescoringOptions(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/RescoringOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/RescoringOptions.cs deleted file mode 100644 index e267c291cf03..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/RescoringOptions.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains the options for rescoring. - public partial class RescoringOptions - { - /// Initializes a new instance of . - public RescoringOptions() - { - } - - /// Initializes a new instance of . - /// If set to true, after the initial search on the compressed vectors, the similarity scores are recalculated using the full-precision vectors. This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling retrieves a greater set of potential documents to offset the resolution loss due to quantization. This increases the set of results that will be rescored on full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when 'enableRescoring' is true. Higher values improve recall at the expense of latency. - /// Controls the storage method for original vectors. This setting is immutable. - internal RescoringOptions(bool? enableRescoring, double? defaultOversampling, VectorSearchCompressionRescoreStorageMethod? rescoreStorageMethod) - { - EnableRescoring = enableRescoring; - DefaultOversampling = defaultOversampling; - RescoreStorageMethod = rescoreStorageMethod; - } - - /// If set to true, after the initial search on the compressed vectors, the similarity scores are recalculated using the full-precision vectors. This will improve recall at the expense of latency. - public bool? EnableRescoring { get; set; } - /// Default oversampling factor. Oversampling retrieves a greater set of potential documents to offset the resolution loss due to quantization. This increases the set of results that will be rescored on full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when 'enableRescoring' is true. Higher values improve recall at the expense of latency. - public double? DefaultOversampling { get; set; } - /// Controls the storage method for original vectors. This setting is immutable. - public VectorSearchCompressionRescoreStorageMethod? RescoreStorageMethod { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetDocumentOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetDocumentOptions.Serialization.cs deleted file mode 100644 index d80cf96ab34a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetDocumentOptions.Serialization.cs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
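The RescoringOptions model removed above exposes three settable knobs (EnableRescoring, DefaultOversampling, RescoreStorageMethod). A minimal sketch of how it could be configured, assuming it is attached to a vector compression configuration elsewhere; the PreserveOriginals value is an assumption and not part of this diff:

    using Azure.Search.Documents.Indexes.Models;

    // Rescore compressed-vector matches against the full-precision vectors.
    var rescoring = new RescoringOptions
    {
        // Recalculate similarity on full-precision vectors after the initial
        // search over compressed vectors (better recall, higher latency).
        EnableRescoring = true,

        // Retrieve 4x more candidates to offset quantization loss; minimum is 1
        // (no oversampling) and this only applies when EnableRescoring is true.
        DefaultOversampling = 4,

        // Assumed value name: keep the original full-precision vectors in storage.
        RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals,
    };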
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class ResetDocumentOptions : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(DocumentKeys)) - { - writer.WritePropertyName("documentKeys"u8); - writer.WriteStartArray(); - foreach (var item in DocumentKeys) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(DataSourceDocumentIds)) - { - writer.WritePropertyName("datasourceDocumentIds"u8); - writer.WriteStartArray(); - foreach (var item in DataSourceDocumentIds) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetDocumentOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetDocumentOptions.cs deleted file mode 100644 index dbd74a1ae1f4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetDocumentOptions.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// The DocumentKeysOrIds. - public partial class ResetDocumentOptions - { - /// Initializes a new instance of . - public ResetDocumentOptions() - { - DocumentKeys = new ChangeTrackingList(); - DataSourceDocumentIds = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// document keys to be reset. - /// datasource document identifiers to be reset. - internal ResetDocumentOptions(IList documentKeys, IList dataSourceDocumentIds) - { - DocumentKeys = documentKeys; - DataSourceDocumentIds = dataSourceDocumentIds; - } - - /// document keys to be reset. - public IList DocumentKeys { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetSkillsOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetSkillsOptions.Serialization.cs deleted file mode 100644 index defcd28822f0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetSkillsOptions.Serialization.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class ResetSkillsOptions : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(SkillNames)) - { - writer.WritePropertyName("skillNames"u8); - writer.WriteStartArray(); - foreach (var item in SkillNames) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetSkillsOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetSkillsOptions.cs deleted file mode 100644 index 3cfade9e4f97..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ResetSkillsOptions.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// The SkillNames. - public partial class ResetSkillsOptions - { - /// Initializes a new instance of . - public ResetSkillsOptions() - { - SkillNames = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// the names of skills to be reset. - internal ResetSkillsOptions(IList skillNames) - { - SkillNames = skillNames; - } - - /// the names of skills to be reset. - public IList SkillNames { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.Serialization.cs deleted file mode 100644 index cdb6dd2ee977..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.Serialization.cs +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ScalarQuantizationCompression : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Parameters)) - { - writer.WritePropertyName("scalarQuantizationParameters"u8); - writer.WriteObjectValue(Parameters); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(CompressionName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(RerankWithOriginalVectors)) - { - writer.WritePropertyName("rerankWithOriginalVectors"u8); - writer.WriteBooleanValue(RerankWithOriginalVectors.Value); - } - if (Optional.IsDefined(DefaultOversampling)) - { - if (DefaultOversampling != null) - { - writer.WritePropertyName("defaultOversampling"u8); - writer.WriteNumberValue(DefaultOversampling.Value); - } - else - { - writer.WriteNull("defaultOversampling"); - } - } - if (Optional.IsDefined(RescoringOptions)) - { - if (RescoringOptions != null) - { - writer.WritePropertyName("rescoringOptions"u8); - writer.WriteObjectValue(RescoringOptions); - } - else - { - writer.WriteNull("rescoringOptions"); - } - } - if (Optional.IsDefined(TruncationDimension)) - { - if (TruncationDimension != null) - { - writer.WritePropertyName("truncationDimension"u8); - writer.WriteNumberValue(TruncationDimension.Value); - } - else - { - writer.WriteNull("truncationDimension"); - } - } - writer.WriteEndObject(); - } - - internal static ScalarQuantizationCompression DeserializeScalarQuantizationCompression(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - ScalarQuantizationParameters scalarQuantizationParameters = default; - string name = default; - 
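The ResetDocumentOptions and ResetSkillsOptions payloads removed above are built by adding to their get-only lists, which the serializers emit as the "documentKeys"/"datasourceDocumentIds" and "skillNames" JSON arrays. A minimal sketch (the document keys and skill name are hypothetical):

    using Azure.Search.Documents.Models;

    // Reset selected documents by key; the serializer writes "documentKeys".
    var resetDocs = new ResetDocumentOptions();
    resetDocs.DocumentKeys.Add("hotel-123");
    resetDocs.DocumentKeys.Add("hotel-456");

    // Reset selected skills by name; the serializer writes "skillNames".
    var resetSkills = new ResetSkillsOptions();
    resetSkills.SkillNames.Add("#1");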
VectorSearchCompressionKind kind = default; - bool? rerankWithOriginalVectors = default; - double? defaultOversampling = default; - RescoringOptions rescoringOptions = default; - int? truncationDimension = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("scalarQuantizationParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - scalarQuantizationParameters = ScalarQuantizationParameters.DeserializeScalarQuantizationParameters(property.Value); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchCompressionKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("rerankWithOriginalVectors"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - rerankWithOriginalVectors = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("defaultOversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultOversampling = null; - continue; - } - defaultOversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("rescoringOptions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - rescoringOptions = null; - continue; - } - rescoringOptions = RescoringOptions.DeserializeRescoringOptions(property.Value); - continue; - } - if (property.NameEquals("truncationDimension"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - truncationDimension = null; - continue; - } - truncationDimension = property.Value.GetInt32(); - continue; - } - } - return new ScalarQuantizationCompression( - name, - kind, - rerankWithOriginalVectors, - defaultOversampling, - rescoringOptions, - truncationDimension, - scalarQuantizationParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new ScalarQuantizationCompression FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeScalarQuantizationCompression(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.cs deleted file mode 100644 index fa10f9817a63..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains configuration options specific to the scalar quantization compression method used during indexing and querying. - public partial class ScalarQuantizationCompression : VectorSearchCompression - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// is null. 
- public ScalarQuantizationCompression(string compressionName) : base(compressionName) - { - Argument.AssertNotNull(compressionName, nameof(compressionName)); - - Kind = VectorSearchCompressionKind.ScalarQuantization; - } - - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. - /// Contains the options for rescoring. - /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. - /// Contains the parameters specific to Scalar Quantization. - internal ScalarQuantizationCompression(string compressionName, VectorSearchCompressionKind kind, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? truncationDimension, ScalarQuantizationParameters parameters) : base(compressionName, kind, rerankWithOriginalVectors, defaultOversampling, rescoringOptions, truncationDimension) - { - Parameters = parameters; - Kind = kind; - } - - /// Contains the parameters specific to Scalar Quantization. - public ScalarQuantizationParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationParameters.Serialization.cs deleted file mode 100644 index 081bee5d424f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationParameters.Serialization.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ScalarQuantizationParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(QuantizedDataType)) - { - if (QuantizedDataType != null) - { - writer.WritePropertyName("quantizedDataType"u8); - writer.WriteStringValue(QuantizedDataType.Value.ToString()); - } - else - { - writer.WriteNull("quantizedDataType"); - } - } - writer.WriteEndObject(); - } - - internal static ScalarQuantizationParameters DeserializeScalarQuantizationParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - VectorSearchCompressionTarget? 
quantizedDataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("quantizedDataType"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - quantizedDataType = null; - continue; - } - quantizedDataType = new VectorSearchCompressionTarget(property.Value.GetString()); - continue; - } - } - return new ScalarQuantizationParameters(quantizedDataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ScalarQuantizationParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeScalarQuantizationParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationParameters.cs deleted file mode 100644 index c687a6d3994d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationParameters.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains the parameters specific to Scalar Quantization. - public partial class ScalarQuantizationParameters - { - /// Initializes a new instance of . - public ScalarQuantizationParameters() - { - } - - /// Initializes a new instance of . - /// The quantized data type of compressed vector values. - internal ScalarQuantizationParameters(VectorSearchCompressionTarget? quantizedDataType) - { - QuantizedDataType = quantizedDataType; - } - - /// The quantized data type of compressed vector values. - public VectorSearchCompressionTarget? QuantizedDataType { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunction.Serialization.cs deleted file mode 100644 index f0a0160b50a6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunction.Serialization.cs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
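The ScalarQuantizationCompression and ScalarQuantizationParameters models removed above expose a public (compressionName) constructor and a settable QuantizedDataType. A minimal sketch, assuming Int8 is a valid VectorSearchCompressionTarget value (the target values themselves are not part of this diff); the rerank/oversampling/rescoring settings live on the VectorSearchCompression base type rather than here:

    using Azure.Search.Documents.Indexes.Models;

    // Scalar quantization configuration; the constructor pins Kind to
    // VectorSearchCompressionKind.ScalarQuantization.
    var compression = new ScalarQuantizationCompression("my-scalar-quantization")
    {
        Parameters = new ScalarQuantizationParameters
        {
            // Assumed value: quantize stored vector components to int8.
            QuantizedDataType = VectorSearchCompressionTarget.Int8,
        },
    };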
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ScoringFunction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - writer.WritePropertyName("fieldName"u8); - writer.WriteStringValue(FieldName); - writer.WritePropertyName("boost"u8); - writer.WriteNumberValue(Boost); - if (Optional.IsDefined(Interpolation)) - { - writer.WritePropertyName("interpolation"u8); - writer.WriteStringValue(Interpolation.Value.ToSerialString()); - } - writer.WriteEndObject(); - } - - internal static ScoringFunction DeserializeScoringFunction(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "distance": return DistanceScoringFunction.DeserializeDistanceScoringFunction(element); - case "freshness": return FreshnessScoringFunction.DeserializeFreshnessScoringFunction(element); - case "magnitude": return MagnitudeScoringFunction.DeserializeMagnitudeScoringFunction(element); - case "tag": return TagScoringFunction.DeserializeTagScoringFunction(element); - } - } - return UnknownScoringFunction.DeserializeUnknownScoringFunction(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ScoringFunction FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeScoringFunction(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunction.cs deleted file mode 100644 index a8fc25985a03..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunction.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for functions that can modify document scores during ranking. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - public partial class ScoringFunction - { - /// Initializes a new instance of . - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - /// The name of the field used as input to the scoring function. - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - internal ScoringFunction(string type, string fieldName, double boost, ScoringFunctionInterpolation? 
interpolation) - { - Type = type; - FieldName = fieldName; - Boost = boost; - Interpolation = interpolation; - } - - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - internal string Type { get; set; } - /// The name of the field used as input to the scoring function. - public string FieldName { get; set; } - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - public double Boost { get; set; } - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - public ScoringFunctionInterpolation? Interpolation { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionAggregation.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionAggregation.Serialization.cs deleted file mode 100644 index fa763b68d5da..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionAggregation.Serialization.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class ScoringFunctionAggregationExtensions - { - public static string ToSerialString(this ScoringFunctionAggregation value) => value switch - { - ScoringFunctionAggregation.Sum => "sum", - ScoringFunctionAggregation.Average => "average", - ScoringFunctionAggregation.Minimum => "minimum", - ScoringFunctionAggregation.Maximum => "maximum", - ScoringFunctionAggregation.FirstMatching => "firstMatching", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown ScoringFunctionAggregation value.") - }; - - public static ScoringFunctionAggregation ToScoringFunctionAggregation(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "sum")) return ScoringFunctionAggregation.Sum; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "average")) return ScoringFunctionAggregation.Average; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "minimum")) return ScoringFunctionAggregation.Minimum; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "maximum")) return ScoringFunctionAggregation.Maximum; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "firstMatching")) return ScoringFunctionAggregation.FirstMatching; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown ScoringFunctionAggregation value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionAggregation.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionAggregation.cs deleted file mode 100644 index fee3a34f148c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionAggregation.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines the aggregation function used to combine the results of all the scoring functions in a scoring profile. - public enum ScoringFunctionAggregation - { - /// Boost scores by the sum of all scoring function results. - Sum, - /// Boost scores by the average of all scoring function results. - Average, - /// Boost scores by the minimum of all scoring function results. 
- Minimum, - /// Boost scores by the maximum of all scoring function results. - Maximum, - /// Boost scores using the first applicable scoring function in the scoring profile. - FirstMatching - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionInterpolation.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionInterpolation.Serialization.cs deleted file mode 100644 index b24a148109ba..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionInterpolation.Serialization.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class ScoringFunctionInterpolationExtensions - { - public static string ToSerialString(this ScoringFunctionInterpolation value) => value switch - { - ScoringFunctionInterpolation.Linear => "linear", - ScoringFunctionInterpolation.Constant => "constant", - ScoringFunctionInterpolation.Quadratic => "quadratic", - ScoringFunctionInterpolation.Logarithmic => "logarithmic", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown ScoringFunctionInterpolation value.") - }; - - public static ScoringFunctionInterpolation ToScoringFunctionInterpolation(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "linear")) return ScoringFunctionInterpolation.Linear; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "constant")) return ScoringFunctionInterpolation.Constant; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "quadratic")) return ScoringFunctionInterpolation.Quadratic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "logarithmic")) return ScoringFunctionInterpolation.Logarithmic; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown ScoringFunctionInterpolation value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionInterpolation.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionInterpolation.cs deleted file mode 100644 index d657c057c061..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringFunctionInterpolation.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines the function used to interpolate score boosting across a range of documents. - public enum ScoringFunctionInterpolation - { - /// Boosts scores by a linearly decreasing amount. This is the default interpolation for scoring functions. - Linear, - /// Boosts scores by a constant factor. - Constant, - /// Boosts scores by an amount that decreases quadratically. Boosts decrease slowly for higher scores, and more quickly as the scores decrease. This interpolation option is not allowed in tag scoring functions. - Quadratic, - /// Boosts scores by an amount that decreases logarithmically. Boosts decrease quickly for higher scores, and more slowly as the scores decrease. This interpolation option is not allowed in tag scoring functions. 
- Logarithmic - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringProfile.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringProfile.Serialization.cs deleted file mode 100644 index 818539dfdc28..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringProfile.Serialization.cs +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ScoringProfile : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(TextWeights)) - { - if (TextWeights != null) - { - writer.WritePropertyName("text"u8); - writer.WriteObjectValue(TextWeights); - } - else - { - writer.WriteNull("text"); - } - } - if (Optional.IsCollectionDefined(Functions)) - { - writer.WritePropertyName("functions"u8); - writer.WriteStartArray(); - foreach (var item in Functions) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(FunctionAggregation)) - { - if (FunctionAggregation != null) - { - writer.WritePropertyName("functionAggregation"u8); - writer.WriteStringValue(FunctionAggregation.Value.ToSerialString()); - } - else - { - writer.WriteNull("functionAggregation"); - } - } - writer.WriteEndObject(); - } - - internal static ScoringProfile DeserializeScoringProfile(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - TextWeights text = default; - IList functions = default; - ScoringFunctionAggregation? functionAggregation = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("text"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - text = null; - continue; - } - text = TextWeights.DeserializeTextWeights(property.Value); - continue; - } - if (property.NameEquals("functions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(ScoringFunction.DeserializeScoringFunction(item)); - } - functions = array; - continue; - } - if (property.NameEquals("functionAggregation"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - functionAggregation = null; - continue; - } - functionAggregation = property.Value.GetString().ToScoringFunctionAggregation(); - continue; - } - } - return new ScoringProfile(name, text, functions ?? new ChangeTrackingList(), functionAggregation); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static ScoringProfile FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeScoringProfile(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringProfile.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringProfile.cs deleted file mode 100644 index d97a650cf2fb..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringProfile.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines parameters for a search index that influence scoring in search queries. - public partial class ScoringProfile - { - /// Initializes a new instance of . - /// The name of the scoring profile. - /// is null. - public ScoringProfile(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - Functions = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The name of the scoring profile. - /// Parameters that boost scoring based on text matches in certain index fields. - /// - /// The collection of functions that influence the scoring of documents. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - /// A value indicating how the results of individual scoring functions should be combined. Defaults to "Sum". Ignored if there are no scoring functions. - internal ScoringProfile(string name, TextWeights textWeights, IList functions, ScoringFunctionAggregation? functionAggregation) - { - Name = name; - TextWeights = textWeights; - Functions = functions; - FunctionAggregation = functionAggregation; - } - - /// The name of the scoring profile. - public string Name { get; set; } - /// Parameters that boost scoring based on text matches in certain index fields. - public TextWeights TextWeights { get; set; } - /// A value indicating how the results of individual scoring functions should be combined. Defaults to "Sum". Ignored if there are no scoring functions. - public ScoringFunctionAggregation? FunctionAggregation { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringStatistics.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringStatistics.Serialization.cs deleted file mode 100644 index 7b4d9276bf10..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringStatistics.Serialization.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
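The ScoringProfile model removed above has a public (name) constructor, a get-only Functions list, and settable TextWeights/FunctionAggregation; the aggregation and interpolation enums are defined in the neighbouring files. A minimal sketch; the concrete scoring-function and TextWeights constructors are not part of this diff, so they are only indicated in comments:

    using Azure.Search.Documents.Indexes.Models;

    // Combine all scoring-function results by summing them (the default noted
    // in the doc comments above).
    var profile = new ScoringProfile("boost-by-rating")
    {
        FunctionAggregation = ScoringFunctionAggregation.Sum,
    };

    // Derived ScoringFunction types (magnitude, freshness, distance, tag) are
    // added to the get-only list, each with its own Interpolation setting:
    // profile.Functions.Add(new MagnitudeScoringFunction(...));
    // profile.TextWeights = new TextWeights(...);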
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Models -{ - internal static partial class ScoringStatisticsExtensions - { - public static string ToSerialString(this ScoringStatistics value) => value switch - { - ScoringStatistics.Local => "local", - ScoringStatistics.Global => "global", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown ScoringStatistics value.") - }; - - public static ScoringStatistics ToScoringStatistics(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "local")) return ScoringStatistics.Local; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "global")) return ScoringStatistics.Global; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown ScoringStatistics value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringStatistics.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringStatistics.cs deleted file mode 100644 index f69683a030f9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScoringStatistics.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// A value that specifies whether we want to calculate scoring statistics (such as document frequency) globally for more consistent scoring, or locally, for lower latency. The default is 'local'. Use 'global' to aggregate scoring statistics globally before scoring. Using global scoring statistics can increase latency of search queries. - public enum ScoringStatistics - { - /// The scoring statistics will be calculated locally for lower latency. - Local, - /// The scoring statistics will be calculated globally for more consistent scoring. - Global - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchAlias.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchAlias.Serialization.cs deleted file mode 100644 index 07aeeb4adcc2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchAlias.Serialization.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
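The ScoringStatistics enum removed above chooses between local statistics (lower latency, the default) and global statistics (more consistent scoring). A minimal sketch, assuming the public SearchOptions type exposes a matching ScoringStatistics property (that property is not part of this diff):

    using Azure.Search.Documents;
    using Azure.Search.Documents.Models;

    // Ask the service to aggregate scoring statistics (such as document
    // frequency) globally before scoring; trades some latency for consistency.
    var options = new SearchOptions
    {
        ScoringStatistics = ScoringStatistics.Global,
    };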
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchAlias : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("indexes"u8); - writer.WriteStartArray(); - foreach (var item in Indexes) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(_etag)) - { - writer.WritePropertyName("@odata.etag"u8); - writer.WriteStringValue(_etag); - } - writer.WriteEndObject(); - } - - internal static SearchAlias DeserializeSearchAlias(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - IList indexes = default; - string odataEtag = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("indexes"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - indexes = array; - continue; - } - if (property.NameEquals("@odata.etag"u8)) - { - odataEtag = property.Value.GetString(); - continue; - } - } - return new SearchAlias(name, indexes, odataEtag); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchAlias FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchAlias(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchAlias.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchAlias.cs deleted file mode 100644 index d0d00e756de9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchAlias.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents an index alias, which describes a mapping from the alias name to an index. The alias name can be used in place of the index name for supported operations. - public partial class SearchAlias - { - /// Initializes a new instance of . - /// The name of the alias. - /// The name of the index this alias maps to. Only one index name may be specified. - /// or is null. - public SearchAlias(string name, IEnumerable indexes) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(indexes, nameof(indexes)); - - Name = name; - Indexes = indexes.ToList(); - } - - /// Initializes a new instance of . - /// The name of the alias. - /// The name of the index this alias maps to. Only one index name may be specified. - /// The ETag of the alias. - internal SearchAlias(string name, IList indexes, string etag) - { - Name = name; - Indexes = indexes; - _etag = etag; - } - - /// The name of the alias. 
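The SearchAlias model removed above has a public (name, indexes) constructor, and its doc comments note that only one index name may be specified. A minimal sketch (the alias and index names are hypothetical):

    using Azure.Search.Documents.Indexes.Models;

    // Map the alias "hotels" to a single underlying index. Name is settable
    // later; Indexes is a get-only list populated from the constructor.
    var alias = new SearchAlias("hotels", new[] { "hotels-v2" });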
- public string Name { get; set; } - /// The name of the index this alias maps to. Only one index name may be specified. - public IList Indexes { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchDocumentsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchDocumentsResult.Serialization.cs deleted file mode 100644 index 462c32fa00dd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchDocumentsResult.Serialization.cs +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - internal partial class SearchDocumentsResult - { - internal static SearchDocumentsResult DeserializeSearchDocumentsResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - long? odataCount = default; - double? searchCoverage = default; - IReadOnlyDictionary> searchFacets = default; - IReadOnlyList searchAnswers = default; - SearchOptions searchNextPageParameters = default; - IReadOnlyList value = default; - string odataNextLink = default; - SemanticErrorReason? searchSemanticPartialResponseReason = default; - SemanticSearchResultsType? searchSemanticPartialResponseType = default; - SemanticQueryRewritesResultType? searchSemanticQueryRewritesResultType = default; - DebugInfo searchDebug = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.count"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - odataCount = property.Value.GetInt64(); - continue; - } - if (property.NameEquals("@search.coverage"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchCoverage = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("@search.facets"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - Dictionary> dictionary = new Dictionary>(); - foreach (var property0 in property.Value.EnumerateObject()) - { - if (property0.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(property0.Name, null); - } - else - { - List array = new List(); - foreach (var item in property0.Value.EnumerateArray()) - { - array.Add(FacetResult.DeserializeFacetResult(item)); - } - dictionary.Add(property0.Name, array); - } - } - searchFacets = dictionary; - continue; - } - if (property.NameEquals("@search.answers"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - searchAnswers = null; - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(QueryAnswerResult.DeserializeQueryAnswerResult(item)); - } - searchAnswers = array; - continue; - } - if (property.NameEquals("@search.nextPageParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchNextPageParameters = SearchOptions.DeserializeSearchOptions(property.Value); - continue; - } - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchResult.DeserializeSearchResult(item)); - } - value = array; - continue; - } - if (property.NameEquals("@odata.nextLink"u8)) - { - odataNextLink = property.Value.GetString(); - continue; - } - if 
(property.NameEquals("@search.semanticPartialResponseReason"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchSemanticPartialResponseReason = new SemanticErrorReason(property.Value.GetString()); - continue; - } - if (property.NameEquals("@search.semanticPartialResponseType"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchSemanticPartialResponseType = new SemanticSearchResultsType(property.Value.GetString()); - continue; - } - if (property.NameEquals("@search.semanticQueryRewritesResultType"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchSemanticQueryRewritesResultType = new SemanticQueryRewritesResultType(property.Value.GetString()); - continue; - } - if (property.NameEquals("@search.debug"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - searchDebug = null; - continue; - } - searchDebug = DebugInfo.DeserializeDebugInfo(property.Value); - continue; - } - } - return new SearchDocumentsResult( - odataCount, - searchCoverage, - searchFacets ?? new ChangeTrackingDictionary>(), - searchAnswers ?? new ChangeTrackingList(), - searchNextPageParameters, - value, - odataNextLink, - searchSemanticPartialResponseReason, - searchSemanticPartialResponseType, - searchSemanticQueryRewritesResultType, - searchDebug); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchDocumentsResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchDocumentsResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchDocumentsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchDocumentsResult.cs deleted file mode 100644 index 972d1a451da5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchDocumentsResult.cs +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Models -{ - /// Response containing search results from an index. - internal partial class SearchDocumentsResult - { - /// Initializes a new instance of . - /// The sequence of results returned by the query. - internal SearchDocumentsResult(IEnumerable results) - { - Facets = new ChangeTrackingDictionary>(); - Answers = new ChangeTrackingList(); - Results = results.ToList(); - } - - /// Initializes a new instance of . - /// The total count of results found by the search operation, or null if the count was not requested. If present, the count may be greater than the number of results in this response. This can happen if you use the $top or $skip parameters, or if the query can't return all the requested documents in a single response. - /// A value indicating the percentage of the index that was included in the query, or null if minimumCoverage was not specified in the request. - /// The facet query results for the search operation, organized as a collection of buckets for each faceted field; null if the query did not include any facet expressions. - /// The answers query results for the search operation; null if the answers query parameter was not specified or set to 'none'. 
- /// Continuation JSON payload returned when the query can't return all the requested results in a single response. You can use this JSON along with @odata.nextLink to formulate another POST Search request to get the next part of the search response. - /// The sequence of results returned by the query. - /// Continuation URL returned when the query can't return all the requested results in a single response. You can use this URL to formulate another GET or POST Search request to get the next part of the search response. Make sure to use the same verb (GET or POST) as the request that produced this response. - /// Reason that a partial response was returned for a semantic ranking request. - /// Type of partial response that was returned for a semantic ranking request. - /// Type of query rewrite that was used to retrieve documents. - /// Debug information that applies to the search results as a whole. - internal SearchDocumentsResult(long? count, double? coverage, IReadOnlyDictionary> facets, IReadOnlyList answers, SearchOptions nextPageParameters, IReadOnlyList results, string nextLink, SemanticErrorReason? semanticPartialResponseReason, SemanticSearchResultsType? semanticPartialResponseType, SemanticQueryRewritesResultType? semanticQueryRewritesResultType, DebugInfo debugInfo) - { - Count = count; - Coverage = coverage; - Facets = facets; - Answers = answers; - NextPageParameters = nextPageParameters; - Results = results; - NextLink = nextLink; - SemanticPartialResponseReason = semanticPartialResponseReason; - SemanticPartialResponseType = semanticPartialResponseType; - SemanticQueryRewritesResultType = semanticQueryRewritesResultType; - DebugInfo = debugInfo; - } - - /// The total count of results found by the search operation, or null if the count was not requested. If present, the count may be greater than the number of results in this response. This can happen if you use the $top or $skip parameters, or if the query can't return all the requested documents in a single response. - public long? Count { get; } - /// A value indicating the percentage of the index that was included in the query, or null if minimumCoverage was not specified in the request. - public double? Coverage { get; } - /// The facet query results for the search operation, organized as a collection of buckets for each faceted field; null if the query did not include any facet expressions. - public IReadOnlyDictionary> Facets { get; } - /// The answers query results for the search operation; null if the answers query parameter was not specified or set to 'none'. - public IReadOnlyList Answers { get; } - /// Continuation JSON payload returned when the query can't return all the requested results in a single response. You can use this JSON along with @odata.nextLink to formulate another POST Search request to get the next part of the search response. - public SearchOptions NextPageParameters { get; } - /// The sequence of results returned by the query. - public IReadOnlyList Results { get; } - /// Continuation URL returned when the query can't return all the requested results in a single response. You can use this URL to formulate another GET or POST Search request to get the next part of the search response. Make sure to use the same verb (GET or POST) as the request that produced this response. - public string NextLink { get; } - /// Reason that a partial response was returned for a semantic ranking request. - public SemanticErrorReason? 
SemanticPartialResponseReason { get; } - /// Type of partial response that was returned for a semantic ranking request. - public SemanticSearchResultsType? SemanticPartialResponseType { get; } - /// Type of query rewrite that was used to retrieve documents. - public SemanticQueryRewritesResultType? SemanticQueryRewritesResultType { get; } - /// Debug information that applies to the search results as a whole. - public DebugInfo DebugInfo { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchField.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchField.Serialization.cs deleted file mode 100644 index d8edd0e7d8f5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchField.Serialization.cs +++ /dev/null @@ -1,400 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchField : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type.ToString()); - if (Optional.IsDefined(IsKey)) - { - writer.WritePropertyName("key"u8); - writer.WriteBooleanValue(IsKey.Value); - } - if (Optional.IsDefined(IsRetrievable)) - { - writer.WritePropertyName("retrievable"u8); - writer.WriteBooleanValue(IsRetrievable.Value); - } - if (Optional.IsDefined(IsStored)) - { - writer.WritePropertyName("stored"u8); - writer.WriteBooleanValue(IsStored.Value); - } - if (Optional.IsDefined(IsSearchable)) - { - writer.WritePropertyName("searchable"u8); - writer.WriteBooleanValue(IsSearchable.Value); - } - if (Optional.IsDefined(IsFilterable)) - { - writer.WritePropertyName("filterable"u8); - writer.WriteBooleanValue(IsFilterable.Value); - } - if (Optional.IsDefined(IsSortable)) - { - writer.WritePropertyName("sortable"u8); - writer.WriteBooleanValue(IsSortable.Value); - } - if (Optional.IsDefined(IsFacetable)) - { - writer.WritePropertyName("facetable"u8); - writer.WriteBooleanValue(IsFacetable.Value); - } - if (Optional.IsDefined(AnalyzerName)) - { - if (AnalyzerName != null) - { - writer.WritePropertyName("analyzer"u8); - writer.WriteStringValue(AnalyzerName.Value.ToString()); - } - else - { - writer.WriteNull("analyzer"); - } - } - if (Optional.IsDefined(SearchAnalyzerName)) - { - if (SearchAnalyzerName != null) - { - writer.WritePropertyName("searchAnalyzer"u8); - writer.WriteStringValue(SearchAnalyzerName.Value.ToString()); - } - else - { - writer.WriteNull("searchAnalyzer"); - } - } - if (Optional.IsDefined(IndexAnalyzerName)) - { - if (IndexAnalyzerName != null) - { - writer.WritePropertyName("indexAnalyzer"u8); - writer.WriteStringValue(IndexAnalyzerName.Value.ToString()); - } - else - { - writer.WriteNull("indexAnalyzer"); - } - } - if (Optional.IsDefined(NormalizerName)) - { - if (NormalizerName != null) - { - writer.WritePropertyName("normalizer"u8); - writer.WriteStringValue(NormalizerName.Value.ToString()); - } - else - { - writer.WriteNull("normalizer"); - } - } - if (Optional.IsDefined(VectorSearchDimensions)) - { - if (VectorSearchDimensions != null) - { - writer.WritePropertyName("dimensions"u8); - writer.WriteNumberValue(VectorSearchDimensions.Value); - } - else - { - 
writer.WriteNull("dimensions"); - } - } - if (Optional.IsDefined(VectorSearchProfileName)) - { - if (VectorSearchProfileName != null) - { - writer.WritePropertyName("vectorSearchProfile"u8); - writer.WriteStringValue(VectorSearchProfileName); - } - else - { - writer.WriteNull("vectorSearchProfile"); - } - } - if (Optional.IsDefined(VectorEncodingFormat)) - { - if (VectorEncodingFormat != null) - { - writer.WritePropertyName("vectorEncoding"u8); - writer.WriteStringValue(VectorEncodingFormat.Value.ToString()); - } - else - { - writer.WriteNull("vectorEncoding"); - } - } - if (Optional.IsCollectionDefined(SynonymMapNames)) - { - writer.WritePropertyName("synonymMaps"u8); - writer.WriteStartArray(); - foreach (var item in SynonymMapNames) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Fields)) - { - writer.WritePropertyName("fields"u8); - writer.WriteStartArray(); - foreach (var item in Fields) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static SearchField DeserializeSearchField(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - SearchFieldDataType type = default; - bool? key = default; - bool? retrievable = default; - bool? stored = default; - bool? searchable = default; - bool? filterable = default; - bool? sortable = default; - bool? facetable = default; - LexicalAnalyzerName? analyzer = default; - LexicalAnalyzerName? searchAnalyzer = default; - LexicalAnalyzerName? indexAnalyzer = default; - LexicalNormalizerName? normalizer = default; - int? dimensions = default; - string vectorSearchProfile = default; - VectorEncodingFormat? vectorEncoding = default; - IList synonymMaps = default; - IList fields = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("type"u8)) - { - type = new SearchFieldDataType(property.Value.GetString()); - continue; - } - if (property.NameEquals("key"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - key = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("retrievable"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - retrievable = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("stored"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - stored = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("searchable"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchable = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("filterable"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - filterable = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("sortable"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - sortable = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("facetable"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - facetable = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("analyzer"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - analyzer = null; - continue; - } - analyzer = new LexicalAnalyzerName(property.Value.GetString()); 
- continue; - } - if (property.NameEquals("searchAnalyzer"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - searchAnalyzer = null; - continue; - } - searchAnalyzer = new LexicalAnalyzerName(property.Value.GetString()); - continue; - } - if (property.NameEquals("indexAnalyzer"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - indexAnalyzer = null; - continue; - } - indexAnalyzer = new LexicalAnalyzerName(property.Value.GetString()); - continue; - } - if (property.NameEquals("normalizer"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - normalizer = null; - continue; - } - normalizer = new LexicalNormalizerName(property.Value.GetString()); - continue; - } - if (property.NameEquals("dimensions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - dimensions = null; - continue; - } - dimensions = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("vectorSearchProfile"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - vectorSearchProfile = null; - continue; - } - vectorSearchProfile = property.Value.GetString(); - continue; - } - if (property.NameEquals("vectorEncoding"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - vectorEncoding = null; - continue; - } - vectorEncoding = new VectorEncodingFormat(property.Value.GetString()); - continue; - } - if (property.NameEquals("synonymMaps"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - synonymMaps = array; - continue; - } - if (property.NameEquals("fields"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(DeserializeSearchField(item)); - } - fields = array; - continue; - } - } - return new SearchField( - name, - type, - key, - retrievable, - stored, - searchable, - filterable, - sortable, - facetable, - analyzer, - searchAnalyzer, - indexAnalyzer, - normalizer, - dimensions, - vectorSearchProfile, - vectorEncoding, - synonymMaps ?? new ChangeTrackingList(), - fields ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchField FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchField(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchField.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchField.cs deleted file mode 100644 index 4bbad7499be6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchField.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents a field in an index definition, which describes the name, data type, and search behavior of a field. 
- public partial class SearchField - { - /// Initializes a new instance of . - /// The name of the field, which must be unique within the fields collection of the index or parent field. - /// The data type of the field. - /// A value indicating whether the field uniquely identifies documents in the index. Exactly one top-level field in each index must be chosen as the key field and it must be of type Edm.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is false for simple fields and null for complex fields. - /// A value indicating whether the field can be returned in a search result. You can disable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be true for key fields, and it must be null for complex fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is true for simple fields, false for vector fields, and null for complex fields. - /// An immutable value indicating whether the field will be persisted separately on disk to be returned in a search result. You can disable this option if you don't plan to return the field contents in a search response to save on storage overhead. This can only be set during index creation and only for vector fields. This property cannot be changed for existing fields or set as false for new fields. If this property is set as false, the property 'retrievable' must also be set to false. This property must be true or unset for key fields, for new fields, and for non-vector fields, and it must be null for complex fields. Disabling this property will reduce index storage requirements. The default is true for vector fields. - /// A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. If you set a searchable field to a value like "sunny day", internally it will be split into the individual tokens "sunny" and "day". This enables full-text searches for these terms. Fields of type Edm.String or Collection(Edm.String) are searchable by default. This property must be false for simple fields of other non-string data types, and it must be null for complex fields. Note: searchable fields consume extra space in your index to accommodate additional tokenized versions of the field value for full-text searches. If you want to save space in your index and you don't need a field to be included in searches, set searchable to false. - /// A value indicating whether to enable the field to be referenced in $filter queries. filterable differs from searchable in how strings are handled. Fields of type Edm.String or Collection(Edm.String) that are filterable do not undergo word-breaking, so comparisons are for exact matches only. For example, if you set such a field f to "sunny day", $filter=f eq 'sunny' will find no matches, but $filter=f eq 'sunny day' will. This property must be null for complex fields. Default is true for simple fields and null for complex fields. - /// A value indicating whether to enable the field to be referenced in $orderby expressions. By default, the search engine sorts results by score, but in many experiences users will want to sort by fields in the documents. A simple field can be sortable only if it is single-valued (it has a single value in the scope of the parent document). 
Simple collection fields cannot be sortable, since they are multi-valued. Simple sub-fields of complex collections are also multi-valued, and therefore cannot be sortable. This is true whether it's an immediate parent field, or an ancestor field, that's the complex collection. Complex fields cannot be sortable and the sortable property must be null for such fields. The default for sortable is true for single-valued simple fields, false for multi-valued simple fields, and null for complex fields. - /// A value indicating whether to enable the field to be referenced in facet queries. Typically used in a presentation of search results that includes hit count by category (for example, search for digital cameras and see hits by brand, by megapixels, by price, and so on). This property must be null for complex fields. Fields of type Edm.GeographyPoint or Collection(Edm.GeographyPoint) cannot be facetable. Default is true for all other simple fields. - /// The name of the analyzer to use for the field. This option can be used only with searchable fields and it can't be set together with either searchAnalyzer or indexAnalyzer. Once the analyzer is chosen, it cannot be changed for the field. Must be null for complex fields. - /// The name of the analyzer used at search time for the field. This option can be used only with searchable fields. It must be set together with indexAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. This analyzer can be updated on an existing field. Must be null for complex fields. - /// The name of the analyzer used at indexing time for the field. This option can be used only with searchable fields. It must be set together with searchAnalyzer and it cannot be set together with the analyzer option. This property cannot be set to the name of a language analyzer; use the analyzer property instead if you need a language analyzer. Once the analyzer is chosen, it cannot be changed for the field. Must be null for complex fields. - /// The name of the normalizer to use for the field. This option can be used only with fields with filterable, sortable, or facetable enabled. Once the normalizer is chosen, it cannot be changed for the field. Must be null for complex fields. - /// The dimensionality of the vector field. - /// The name of the vector search profile that specifies the algorithm and vectorizer to use when searching the vector field. - /// The encoding format to interpret the field contents. - /// A list of the names of synonym maps to associate with this field. This option can be used only with searchable fields. Currently only one synonym map per field is supported. Assigning a synonym map to a field ensures that query terms targeting that field are expanded at query-time using the rules in the synonym map. This attribute can be changed on existing fields. Must be null or an empty collection for complex fields. - /// A list of sub-fields if this is a field of type Edm.ComplexType or Collection(Edm.ComplexType). Must be null or empty for simple fields. - internal SearchField(string name, SearchFieldDataType type, bool? isKey, bool? isRetrievable, bool? isStored, bool? isSearchable, bool? isFilterable, bool? isSortable, bool? isFacetable, LexicalAnalyzerName? analyzerName, LexicalAnalyzerName? searchAnalyzerName, LexicalAnalyzerName? indexAnalyzerName, LexicalNormalizerName? normalizerName, int? 
vectorSearchDimensions, string vectorSearchProfileName, VectorEncodingFormat? vectorEncodingFormat, IList synonymMapNames, IList fields) - { - Name = name; - Type = type; - IsKey = isKey; - IsRetrievable = isRetrievable; - IsStored = isStored; - IsSearchable = isSearchable; - IsFilterable = isFilterable; - IsSortable = isSortable; - IsFacetable = isFacetable; - AnalyzerName = analyzerName; - SearchAnalyzerName = searchAnalyzerName; - IndexAnalyzerName = indexAnalyzerName; - NormalizerName = normalizerName; - VectorSearchDimensions = vectorSearchDimensions; - VectorSearchProfileName = vectorSearchProfileName; - VectorEncodingFormat = vectorEncodingFormat; - SynonymMapNames = synonymMapNames; - Fields = fields; - } - /// The dimensionality of the vector field. - public int? VectorSearchDimensions { get; set; } - /// The name of the vector search profile that specifies the algorithm and vectorizer to use when searching the vector field. - public string VectorSearchProfileName { get; set; } - /// The encoding format to interpret the field contents. - public VectorEncodingFormat? VectorEncodingFormat { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchFieldDataType.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchFieldDataType.cs deleted file mode 100644 index 557c0c2ee721..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchFieldDataType.cs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines the data type of a field in a search index. - public readonly partial struct SearchFieldDataType : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public SearchFieldDataType(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string StringValue = "Edm.String"; - private const string Int32Value = "Edm.Int32"; - private const string Int64Value = "Edm.Int64"; - private const string DoubleValue = "Edm.Double"; - private const string BooleanValue = "Edm.Boolean"; - private const string DateTimeOffsetValue = "Edm.DateTimeOffset"; - private const string GeographyPointValue = "Edm.GeographyPoint"; - private const string ComplexValue = "Edm.ComplexType"; - private const string SingleValue = "Edm.Single"; - private const string HalfValue = "Edm.Half"; - private const string Int16Value = "Edm.Int16"; - private const string SByteValue = "Edm.SByte"; - private const string ByteValue = "Edm.Byte"; - /// Indicates that a field contains a half-precision floating point number. This is only valid when used with Collection(Edm.Half). - public static SearchFieldDataType Half { get; } = new SearchFieldDataType(HalfValue); - /// Determines if two values are the same. - public static bool operator ==(SearchFieldDataType left, SearchFieldDataType right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(SearchFieldDataType left, SearchFieldDataType right) => !left.Equals(right); - /// Converts a to a . 
- public static implicit operator SearchFieldDataType(string value) => new SearchFieldDataType(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is SearchFieldDataType other && Equals(other); - /// - public bool Equals(SearchFieldDataType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndex.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndex.Serialization.cs deleted file mode 100644 index 44028bd8351e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndex.Serialization.cs +++ /dev/null @@ -1,396 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndex : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("fields"u8); - writer.WriteStartArray(); - foreach (var item in _fields) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - if (Optional.IsCollectionDefined(ScoringProfiles)) - { - writer.WritePropertyName("scoringProfiles"u8); - writer.WriteStartArray(); - foreach (var item in ScoringProfiles) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(DefaultScoringProfile)) - { - writer.WritePropertyName("defaultScoringProfile"u8); - writer.WriteStringValue(DefaultScoringProfile); - } - if (Optional.IsDefined(CorsOptions)) - { - if (CorsOptions != null) - { - writer.WritePropertyName("corsOptions"u8); - writer.WriteObjectValue(CorsOptions); - } - else - { - writer.WriteNull("corsOptions"); - } - } - if (Optional.IsCollectionDefined(Suggesters)) - { - writer.WritePropertyName("suggesters"u8); - writer.WriteStartArray(); - foreach (var item in Suggesters) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Analyzers)) - { - writer.WritePropertyName("analyzers"u8); - writer.WriteStartArray(); - foreach (var item in Analyzers) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Tokenizers)) - { - writer.WritePropertyName("tokenizers"u8); - writer.WriteStartArray(); - foreach (var item in Tokenizers) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(TokenFilters)) - { - writer.WritePropertyName("tokenFilters"u8); - writer.WriteStartArray(); - foreach (var item in TokenFilters) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(CharFilters)) - { - writer.WritePropertyName("charFilters"u8); - writer.WriteStartArray(); - foreach (var item in CharFilters) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Normalizers)) - { - writer.WritePropertyName("normalizers"u8); - writer.WriteStartArray(); 
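The deleted SearchField and SearchFieldDataType models above expose a string-backed data type and settable vector properties (VectorSearchDimensions, VectorSearchProfileName, VectorEncodingFormat). A minimal sketch of defining a vector field; the two-argument SearchField constructor lives in the hand-written partial class rather than the generated code shown here, so treat it as an assumption:

using Azure.Search.Documents.Indexes.Models;

public static class SearchFieldSketch
{
    public static SearchField BuildVectorField()
    {
        // SearchFieldDataType wraps an Edm type name, matching the constants in the
        // deleted SearchFieldDataType.cs ("Edm.String", "Edm.Single", ...).
        var vectorType = new SearchFieldDataType("Collection(Edm.Single)");

        return new SearchField("contentVector", vectorType) // assumed public constructor
        {
            IsSearchable = true,
            // Vector-specific settable properties from the deleted SearchField.cs:
            VectorSearchDimensions = 1536,                  // assumed embedding size
            VectorSearchProfileName = "my-vector-profile"   // assumed profile name
        };
    }
}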
- foreach (var item in Normalizers) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(EncryptionKey)) - { - if (EncryptionKey != null) - { - writer.WritePropertyName("encryptionKey"u8); - writer.WriteObjectValue(EncryptionKey); - } - else - { - writer.WriteNull("encryptionKey"); - } - } - if (Optional.IsDefined(Similarity)) - { - writer.WritePropertyName("similarity"u8); - writer.WriteObjectValue(Similarity); - } - if (Optional.IsDefined(SemanticSearch)) - { - if (SemanticSearch != null) - { - writer.WritePropertyName("semantic"u8); - writer.WriteObjectValue(SemanticSearch); - } - else - { - writer.WriteNull("semantic"); - } - } - if (Optional.IsDefined(VectorSearch)) - { - if (VectorSearch != null) - { - writer.WritePropertyName("vectorSearch"u8); - writer.WriteObjectValue(VectorSearch); - } - else - { - writer.WriteNull("vectorSearch"); - } - } - if (Optional.IsDefined(_etag)) - { - writer.WritePropertyName("@odata.etag"u8); - writer.WriteStringValue(_etag); - } - writer.WriteEndObject(); - } - - internal static SearchIndex DeserializeSearchIndex(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - IList fields = default; - IList scoringProfiles = default; - string defaultScoringProfile = default; - CorsOptions corsOptions = default; - IList suggesters = default; - IList analyzers = default; - IList tokenizers = default; - IList tokenFilters = default; - IList charFilters = default; - IList normalizers = default; - SearchResourceEncryptionKey encryptionKey = default; - SimilarityAlgorithm similarity = default; - SemanticSearch semantic = default; - VectorSearch vectorSearch = default; - string odataEtag = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("fields"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchField.DeserializeSearchField(item)); - } - fields = array; - continue; - } - if (property.NameEquals("scoringProfiles"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(ScoringProfile.DeserializeScoringProfile(item)); - } - scoringProfiles = array; - continue; - } - if (property.NameEquals("defaultScoringProfile"u8)) - { - defaultScoringProfile = property.Value.GetString(); - continue; - } - if (property.NameEquals("corsOptions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - corsOptions = null; - continue; - } - corsOptions = CorsOptions.DeserializeCorsOptions(property.Value); - continue; - } - if (property.NameEquals("suggesters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchSuggester.DeserializeSearchSuggester(item)); - } - suggesters = array; - continue; - } - if (property.NameEquals("analyzers"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(LexicalAnalyzer.DeserializeLexicalAnalyzer(item)); - } - analyzers = array; - continue; - } - if (property.NameEquals("tokenizers"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - 
} - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(LexicalTokenizer.DeserializeLexicalTokenizer(item)); - } - tokenizers = array; - continue; - } - if (property.NameEquals("tokenFilters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(TokenFilter.DeserializeTokenFilter(item)); - } - tokenFilters = array; - continue; - } - if (property.NameEquals("charFilters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(CharFilter.DeserializeCharFilter(item)); - } - charFilters = array; - continue; - } - if (property.NameEquals("normalizers"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(LexicalNormalizer.DeserializeLexicalNormalizer(item)); - } - normalizers = array; - continue; - } - if (property.NameEquals("encryptionKey"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - encryptionKey = null; - continue; - } - encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value); - continue; - } - if (property.NameEquals("similarity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - similarity = SimilarityAlgorithm.DeserializeSimilarityAlgorithm(property.Value); - continue; - } - if (property.NameEquals("semantic"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - semantic = null; - continue; - } - semantic = SemanticSearch.DeserializeSemanticSearch(property.Value); - continue; - } - if (property.NameEquals("vectorSearch"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - vectorSearch = null; - continue; - } - vectorSearch = VectorSearch.DeserializeVectorSearch(property.Value); - continue; - } - if (property.NameEquals("@odata.etag"u8)) - { - odataEtag = property.Value.GetString(); - continue; - } - } - return new SearchIndex( - name, - fields, - scoringProfiles ?? new ChangeTrackingList(), - defaultScoringProfile, - corsOptions, - suggesters ?? new ChangeTrackingList(), - analyzers ?? new ChangeTrackingList(), - tokenizers ?? new ChangeTrackingList(), - tokenFilters ?? new ChangeTrackingList(), - charFilters ?? new ChangeTrackingList(), - normalizers ?? new ChangeTrackingList(), - encryptionKey, - similarity, - semantic, - vectorSearch, - odataEtag); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndex FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndex(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndex.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndex.cs deleted file mode 100644 index dfaafc2d2553..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndex.cs +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright (c) Microsoft Corporation. 
All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents a search index definition, which describes the fields and search behavior of an index. - public partial class SearchIndex - { - /// Initializes a new instance of . - /// The name of the index. - /// The fields of the index. - /// The scoring profiles for the index. - /// The name of the scoring profile to use if none is specified in the query. If this property is not set and no scoring profile is specified in the query, then default scoring (tf-idf) will be used. - /// Options to control Cross-Origin Resource Sharing (CORS) for the index. - /// The suggesters for the index. - /// - /// The analyzers for the index. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - /// - /// The tokenizers for the index. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , and . - /// - /// - /// The token filters for the index. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , and . - /// - /// - /// The character filters for the index. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// - /// The normalizers for the index. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include . - /// - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - /// - /// The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. 
- /// The available derived classes include and . - /// - /// Defines parameters for a search index that influence semantic capabilities. - /// Contains configuration options related to vector search. - /// The ETag of the index. - internal SearchIndex(string name, IList fields, IList scoringProfiles, string defaultScoringProfile, CorsOptions corsOptions, IList suggesters, IList analyzers, IList tokenizers, IList tokenFilters, IList charFilters, IList normalizers, SearchResourceEncryptionKey encryptionKey, SimilarityAlgorithm similarity, SemanticSearch semanticSearch, VectorSearch vectorSearch, string etag) - { - Name = name; - _fields = fields; - ScoringProfiles = scoringProfiles; - DefaultScoringProfile = defaultScoringProfile; - CorsOptions = corsOptions; - Suggesters = suggesters; - Analyzers = analyzers; - Tokenizers = tokenizers; - TokenFilters = tokenFilters; - CharFilters = charFilters; - Normalizers = normalizers; - EncryptionKey = encryptionKey; - Similarity = similarity; - SemanticSearch = semanticSearch; - VectorSearch = vectorSearch; - _etag = etag; - } - /// The name of the scoring profile to use if none is specified in the query. If this property is not set and no scoring profile is specified in the query, then default scoring (tf-idf) will be used. - public string DefaultScoringProfile { get; set; } - /// Options to control Cross-Origin Resource Sharing (CORS) for the index. - public CorsOptions CorsOptions { get; set; } - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - public SearchResourceEncryptionKey EncryptionKey { get; set; } - /// - /// The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SimilarityAlgorithm Similarity { get; set; } - /// Defines parameters for a search index that influence semantic capabilities. - public SemanticSearch SemanticSearch { get; set; } - /// Contains configuration options related to vector search. - public VectorSearch VectorSearch { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexStatistics.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexStatistics.Serialization.cs deleted file mode 100644 index db8623c6e853..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexStatistics.Serialization.cs +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
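The deleted SearchIndex model above holds the index name, field list, and optional scoring, CORS, similarity, semantic, and vector search settings. A minimal sketch of building and creating an index; SearchIndexClient and the single-argument SearchIndex constructor come from the public, non-generated surface and are assumptions here:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

public static class SearchIndexSketch
{
    public static async Task CreateAsync()
    {
        var indexClient = new SearchIndexClient(
            new Uri("https://<service>.search.windows.net"), // hypothetical endpoint
            new AzureKeyCredential("<admin-key>"));

        var index = new SearchIndex("hotels") // assumed public constructor
        {
            Fields = new List<SearchField>
            {
                new SearchField("hotelId", new SearchFieldDataType("Edm.String")) { IsKey = true, IsFilterable = true },
                new SearchField("description", new SearchFieldDataType("Edm.String")) { IsSearchable = true }
            }
            // Similarity, SemanticSearch, VectorSearch, CorsOptions, and EncryptionKey
            // from the deleted model could be assigned here as well.
        };

        await indexClient.CreateIndexAsync(index);
    }
}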
- -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexStatistics - { - internal static SearchIndexStatistics DeserializeSearchIndexStatistics(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - long documentCount = default; - long storageSize = default; - long vectorIndexSize = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("documentCount"u8)) - { - documentCount = property.Value.GetInt64(); - continue; - } - if (property.NameEquals("storageSize"u8)) - { - storageSize = property.Value.GetInt64(); - continue; - } - if (property.NameEquals("vectorIndexSize"u8)) - { - vectorIndexSize = property.Value.GetInt64(); - continue; - } - } - return new SearchIndexStatistics(documentCount, storageSize, vectorIndexSize); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexStatistics FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexStatistics(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexStatistics.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexStatistics.cs deleted file mode 100644 index 6dcacbea3f3d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexStatistics.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Statistics for a given index. Statistics are collected periodically and are not guaranteed to always be up-to-date. - public partial class SearchIndexStatistics - { - /// Initializes a new instance of . - /// The number of documents in the index. - /// The amount of storage in bytes consumed by the index. - /// The amount of memory in bytes consumed by vectors in the index. - internal SearchIndexStatistics(long documentCount, long storageSize, long vectorIndexSize) - { - DocumentCount = documentCount; - StorageSize = storageSize; - VectorIndexSize = vectorIndexSize; - } - - /// The number of documents in the index. - public long DocumentCount { get; } - /// The amount of storage in bytes consumed by the index. - public long StorageSize { get; } - /// The amount of memory in bytes consumed by vectors in the index. - public long VectorIndexSize { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexer.Serialization.cs deleted file mode 100644 index e2f6013c6e34..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexer.Serialization.cs +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
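The deleted SearchIndexStatistics model above exposes three read-only values: document count, storage size, and vector index size. A minimal sketch of reading them; GetIndexStatisticsAsync belongs to the public SearchIndexClient surface and is an assumption relative to the generated code shown here:

using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

public static class IndexStatisticsSketch
{
    public static async Task PrintAsync(SearchIndexClient indexClient)
    {
        Response<SearchIndexStatistics> stats =
            await indexClient.GetIndexStatisticsAsync("hotels"); // assumed index name

        // The three read-only properties from the deleted SearchIndexStatistics.cs.
        Console.WriteLine($"Documents:            {stats.Value.DocumentCount}");
        Console.WriteLine($"Storage (bytes):      {stats.Value.StorageSize}");
        Console.WriteLine($"Vector index (bytes): {stats.Value.VectorIndexSize}");
    }
}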
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WritePropertyName("dataSourceName"u8); - writer.WriteStringValue(DataSourceName); - if (Optional.IsDefined(SkillsetName)) - { - writer.WritePropertyName("skillsetName"u8); - writer.WriteStringValue(SkillsetName); - } - writer.WritePropertyName("targetIndexName"u8); - writer.WriteStringValue(TargetIndexName); - if (Optional.IsDefined(Schedule)) - { - if (Schedule != null) - { - writer.WritePropertyName("schedule"u8); - writer.WriteObjectValue(Schedule); - } - else - { - writer.WriteNull("schedule"); - } - } - if (Optional.IsDefined(Parameters)) - { - if (Parameters != null) - { - writer.WritePropertyName("parameters"u8); - writer.WriteObjectValue(Parameters); - } - else - { - writer.WriteNull("parameters"); - } - } - if (Optional.IsCollectionDefined(FieldMappings)) - { - writer.WritePropertyName("fieldMappings"u8); - writer.WriteStartArray(); - foreach (var item in FieldMappings) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(OutputFieldMappings)) - { - writer.WritePropertyName("outputFieldMappings"u8); - writer.WriteStartArray(); - foreach (var item in OutputFieldMappings) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(IsDisabled)) - { - if (IsDisabled != null) - { - writer.WritePropertyName("disabled"u8); - writer.WriteBooleanValue(IsDisabled.Value); - } - else - { - writer.WriteNull("disabled"); - } - } - if (Optional.IsDefined(_etag)) - { - writer.WritePropertyName("@odata.etag"u8); - writer.WriteStringValue(_etag); - } - if (Optional.IsDefined(EncryptionKey)) - { - if (EncryptionKey != null) - { - writer.WritePropertyName("encryptionKey"u8); - writer.WriteObjectValue(EncryptionKey); - } - else - { - writer.WriteNull("encryptionKey"); - } - } - if (Optional.IsDefined(Cache)) - { - if (Cache != null) - { - writer.WritePropertyName("cache"u8); - writer.WriteObjectValue(Cache); - } - else - { - writer.WriteNull("cache"); - } - } - writer.WriteEndObject(); - } - - internal static SearchIndexer DeserializeSearchIndexer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string description = default; - string dataSourceName = default; - string skillsetName = default; - string targetIndexName = default; - IndexingSchedule schedule = default; - IndexingParameters parameters = default; - IList fieldMappings = default; - IList outputFieldMappings = default; - bool? 
disabled = default; - string odataEtag = default; - SearchResourceEncryptionKey encryptionKey = default; - SearchIndexerCache cache = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("dataSourceName"u8)) - { - dataSourceName = property.Value.GetString(); - continue; - } - if (property.NameEquals("skillsetName"u8)) - { - skillsetName = property.Value.GetString(); - continue; - } - if (property.NameEquals("targetIndexName"u8)) - { - targetIndexName = property.Value.GetString(); - continue; - } - if (property.NameEquals("schedule"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - schedule = null; - continue; - } - schedule = IndexingSchedule.DeserializeIndexingSchedule(property.Value); - continue; - } - if (property.NameEquals("parameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - parameters = null; - continue; - } - parameters = IndexingParameters.DeserializeIndexingParameters(property.Value); - continue; - } - if (property.NameEquals("fieldMappings"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(FieldMapping.DeserializeFieldMapping(item)); - } - fieldMappings = array; - continue; - } - if (property.NameEquals("outputFieldMappings"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(FieldMapping.DeserializeFieldMapping(item)); - } - outputFieldMappings = array; - continue; - } - if (property.NameEquals("disabled"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - disabled = null; - continue; - } - disabled = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.etag"u8)) - { - odataEtag = property.Value.GetString(); - continue; - } - if (property.NameEquals("encryptionKey"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - encryptionKey = null; - continue; - } - encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value); - continue; - } - if (property.NameEquals("cache"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - cache = null; - continue; - } - cache = SearchIndexerCache.DeserializeSearchIndexerCache(property.Value); - continue; - } - } - return new SearchIndexer( - name, - description, - dataSourceName, - skillsetName, - targetIndexName, - schedule, - parameters, - fieldMappings ?? new ChangeTrackingList(), - outputFieldMappings ?? new ChangeTrackingList(), - disabled, - odataEtag, - encryptionKey, - cache); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexer(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexer.cs deleted file mode 100644 index 2d851fc20771..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexer.cs +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents an indexer. - public partial class SearchIndexer - { - /// Initializes a new instance of . - /// The name of the indexer. - /// The name of the datasource from which this indexer reads data. - /// The name of the index to which this indexer writes data. - /// , or is null. - public SearchIndexer(string name, string dataSourceName, string targetIndexName) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(dataSourceName, nameof(dataSourceName)); - Argument.AssertNotNull(targetIndexName, nameof(targetIndexName)); - - Name = name; - DataSourceName = dataSourceName; - TargetIndexName = targetIndexName; - FieldMappings = new ChangeTrackingList(); - OutputFieldMappings = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The name of the indexer. - /// The description of the indexer. - /// The name of the datasource from which this indexer reads data. - /// The name of the skillset executing with this indexer. - /// The name of the index to which this indexer writes data. - /// The schedule for this indexer. - /// Parameters for indexer execution. - /// Defines mappings between fields in the data source and corresponding target fields in the index. - /// Output field mappings are applied after enrichment and immediately before indexing. - /// A value indicating whether the indexer is disabled. Default is false. - /// The ETag of the indexer. - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your indexer definition (as well as indexer execution status) when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your indexer definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your indexer definition (and indexer execution status) will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - /// Adds caching to an enrichment pipeline to allow for incremental modification steps without having to rebuild the index every time. - internal SearchIndexer(string name, string description, string dataSourceName, string skillsetName, string targetIndexName, IndexingSchedule schedule, IndexingParameters parameters, IList fieldMappings, IList outputFieldMappings, bool? 
isDisabled, string etag, SearchResourceEncryptionKey encryptionKey, SearchIndexerCache cache) - { - Name = name; - Description = description; - DataSourceName = dataSourceName; - SkillsetName = skillsetName; - TargetIndexName = targetIndexName; - Schedule = schedule; - Parameters = parameters; - FieldMappings = fieldMappings; - OutputFieldMappings = outputFieldMappings; - IsDisabled = isDisabled; - _etag = etag; - EncryptionKey = encryptionKey; - Cache = cache; - } - - /// The name of the indexer. - public string Name { get; set; } - /// The description of the indexer. - public string Description { get; set; } - /// The name of the datasource from which this indexer reads data. - public string DataSourceName { get; set; } - /// The name of the skillset executing with this indexer. - public string SkillsetName { get; set; } - /// The name of the index to which this indexer writes data. - public string TargetIndexName { get; set; } - /// The schedule for this indexer. - public IndexingSchedule Schedule { get; set; } - /// Parameters for indexer execution. - public IndexingParameters Parameters { get; set; } - /// A value indicating whether the indexer is disabled. Default is false. - public bool? IsDisabled { get; set; } - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your indexer definition (as well as indexer execution status) when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your indexer definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your indexer definition (and indexer execution status) will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - public SearchResourceEncryptionKey EncryptionKey { get; set; } - /// Adds caching to an enrichment pipeline to allow for incremental modification steps without having to rebuild the index every time. - public SearchIndexerCache Cache { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerCache.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerCache.Serialization.cs deleted file mode 100644 index d08e1ac14ee6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerCache.Serialization.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
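The deleted SearchIndexer model above requires a name, data source name, and target index name, and optionally carries a schedule, field mappings, an encryption key, and an enrichment cache. A minimal sketch of creating one; SearchIndexerClient and the IndexingSchedule(TimeSpan) overload are assumptions from the public surface:

using System;
using System.Threading.Tasks;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

public static class SearchIndexerSketch
{
    public static async Task CreateAsync(SearchIndexerClient indexerClient)
    {
        // The three required arguments validated by the deleted public constructor.
        var indexer = new SearchIndexer("hotels-indexer", "hotels-datasource", "hotels")
        {
            Description = "Indexes the hotels data source into the hotels index",
            Schedule = new IndexingSchedule(TimeSpan.FromHours(1)), // assumed overload
            IsDisabled = false
            // An enrichment cache could also be attached:
            // Cache = new SearchIndexerCache { EnableReprocessing = true }
        };

        await indexerClient.CreateIndexerAsync(indexer);
    }
}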
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerCache : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(StorageConnectionString)) - { - writer.WritePropertyName("storageConnectionString"u8); - writer.WriteStringValue(StorageConnectionString); - } - if (Optional.IsDefined(EnableReprocessing)) - { - if (EnableReprocessing != null) - { - writer.WritePropertyName("enableReprocessing"u8); - writer.WriteBooleanValue(EnableReprocessing.Value); - } - else - { - writer.WriteNull("enableReprocessing"); - } - } - if (Optional.IsDefined(Identity)) - { - if (Identity != null) - { - writer.WritePropertyName("identity"u8); - writer.WriteObjectValue(Identity); - } - else - { - writer.WriteNull("identity"); - } - } - writer.WriteEndObject(); - } - - internal static SearchIndexerCache DeserializeSearchIndexerCache(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string storageConnectionString = default; - bool? enableReprocessing = default; - SearchIndexerDataIdentity identity = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("storageConnectionString"u8)) - { - storageConnectionString = property.Value.GetString(); - continue; - } - if (property.NameEquals("enableReprocessing"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - enableReprocessing = null; - continue; - } - enableReprocessing = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("identity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - identity = null; - continue; - } - identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - } - return new SearchIndexerCache(storageConnectionString, enableReprocessing, identity); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerCache FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerCache(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerCache.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerCache.cs deleted file mode 100644 index bddd36fa0a5a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerCache.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The SearchIndexerCache. - public partial class SearchIndexerCache - { - /// Initializes a new instance of . - public SearchIndexerCache() - { - } - - /// Initializes a new instance of . - /// The connection string to the storage account where the cache data will be persisted. - /// Specifies whether incremental reprocessing is enabled. - /// - /// The user-assigned managed identity used for connections to the enrichment cache. 
If the connection string indicates an identity (ResourceId) and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - internal SearchIndexerCache(string storageConnectionString, bool? enableReprocessing, SearchIndexerDataIdentity identity) - { - StorageConnectionString = storageConnectionString; - EnableReprocessing = enableReprocessing; - Identity = identity; - } - /// Specifies whether incremental reprocessing is enabled. - public bool? EnableReprocessing { get; set; } - /// - /// The user-assigned managed identity used for connections to the enrichment cache. If the connection string indicates an identity (ResourceId) and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity Identity { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataContainer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataContainer.Serialization.cs deleted file mode 100644 index 8c8309cbaa03..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataContainer.Serialization.cs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerDataContainer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(Query)) - { - writer.WritePropertyName("query"u8); - writer.WriteStringValue(Query); - } - writer.WriteEndObject(); - } - - internal static SearchIndexerDataContainer DeserializeSearchIndexerDataContainer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string query = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("query"u8)) - { - query = property.Value.GetString(); - continue; - } - } - return new SearchIndexerDataContainer(name, query); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerDataContainer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerDataContainer(document.RootElement); - } - - /// Convert into a . 
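
The cache model deleted above is normally attached through the indexer's Cache property shown earlier. A minimal sketch, assuming StorageConnectionString is exposed as a settable property on the non-generated partial (this diff only shows its serialization), and reusing the placeholder indexer from the previous sketch:

using Azure.Search.Documents.Indexes.Models;

// Enable incremental enrichment by caching intermediate results in Blob storage.
indexer.Cache = new SearchIndexerCache
{
    // Assumed property from the hand-written partial; only its serialization appears above.
    StorageConnectionString = "<storage-connection-string>",
    EnableReprocessing = true
    // Identity can be set to a SearchIndexerDataIdentity derived type; see the identity sketch below.
};
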
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataContainer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataContainer.cs deleted file mode 100644 index ee53b07012c5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataContainer.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents information about the entity (such as Azure SQL table or CosmosDB collection) that will be indexed. - public partial class SearchIndexerDataContainer - { - /// Initializes a new instance of . - /// The name of the table or view (for Azure SQL data source) or collection (for CosmosDB data source) that will be indexed. - /// is null. - public SearchIndexerDataContainer(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - } - - /// Initializes a new instance of . - /// The name of the table or view (for Azure SQL data source) or collection (for CosmosDB data source) that will be indexed. - /// A query that is applied to this data container. The syntax and meaning of this parameter is datasource-specific. Not supported by Azure SQL datasources. - internal SearchIndexerDataContainer(string name, string query) - { - Name = name; - Query = query; - } - - /// The name of the table or view (for Azure SQL data source) or collection (for CosmosDB data source) that will be indexed. - public string Name { get; set; } - /// A query that is applied to this data container. The syntax and meaning of this parameter is datasource-specific. Not supported by Azure SQL datasources. - public string Query { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataIdentity.Serialization.cs deleted file mode 100644 index c718810f29f1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataIdentity.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
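
As the constructors above show, a data container needs only a name, with an optional data-source-specific query. A minimal sketch; the collection name and query are placeholders:

using Azure.Search.Documents.Indexes.Models;

// Point at a Cosmos DB collection and narrow it with a datasource-specific query.
var container = new SearchIndexerDataContainer("hotels-collection")
{
    // Query is ignored for Azure SQL datasources, per the doc comment above.
    Query = "SELECT * FROM c WHERE c._ts >= @HighWaterMark ORDER BY c._ts"
};
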
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerDataIdentity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static SearchIndexerDataIdentity DeserializeSearchIndexerDataIdentity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.DataNoneIdentity": return SearchIndexerDataNoneIdentity.DeserializeSearchIndexerDataNoneIdentity(element); - case "#Microsoft.Azure.Search.DataUserAssignedIdentity": return SearchIndexerDataUserAssignedIdentity.DeserializeSearchIndexerDataUserAssignedIdentity(element); - } - } - return UnknownSearchIndexerDataIdentity.DeserializeUnknownSearchIndexerDataIdentity(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerDataIdentity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerDataIdentity(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataIdentity.cs deleted file mode 100644 index b60660581053..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataIdentity.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Abstract base type for data identities. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public abstract partial class SearchIndexerDataIdentity - { - /// Initializes a new instance of . - public SearchIndexerDataIdentity() - { - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of identity. - internal SearchIndexerDataIdentity(string oDataType) - { - ODataType = oDataType; - } - - /// A URI fragment specifying the type of identity. - internal string ODataType { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataNoneIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataNoneIdentity.Serialization.cs deleted file mode 100644 index 7cacdce4e8b7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataNoneIdentity.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerDataNoneIdentity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static SearchIndexerDataNoneIdentity DeserializeSearchIndexerDataNoneIdentity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new SearchIndexerDataNoneIdentity(odataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SearchIndexerDataNoneIdentity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerDataNoneIdentity(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataNoneIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataNoneIdentity.cs deleted file mode 100644 index 1ca02ea386cf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataNoneIdentity.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Clears the identity property of a datasource. - public partial class SearchIndexerDataNoneIdentity : SearchIndexerDataIdentity - { - /// Initializes a new instance of . - public SearchIndexerDataNoneIdentity() - { - ODataType = "#Microsoft.Azure.Search.DataNoneIdentity"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of identity. - internal SearchIndexerDataNoneIdentity(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "#Microsoft.Azure.Search.DataNoneIdentity"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceConnection.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceConnection.Serialization.cs deleted file mode 100644 index 6ee8a098669e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceConnection.Serialization.cs +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
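
The @odata.type discriminator handled above is what routes deserialization to the None and UserAssigned variants; from the caller's side the derived types are simply assigned to an Identity property typed as the base class. A minimal sketch, reusing the cache from the earlier placeholder example:

using Azure.Search.Documents.Indexes.Models;

// The base-class-typed property accepts either derived identity.
SearchIndexerDataIdentity identity = new SearchIndexerDataNoneIdentity(); // serialized as "#Microsoft.Azure.Search.DataNoneIdentity"

// Assigning the "none" identity clears any previously configured identity on the resource.
indexer.Cache.Identity = identity;
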
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerDataSourceConnection : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type.ToString()); - writer.WritePropertyName("credentials"u8); - writer.WriteObjectValue(CredentialsInternal); - writer.WritePropertyName("container"u8); - writer.WriteObjectValue(Container); - if (Optional.IsDefined(Identity)) - { - if (Identity != null) - { - writer.WritePropertyName("identity"u8); - writer.WriteObjectValue(Identity); - } - else - { - writer.WriteNull("identity"); - } - } - if (Optional.IsDefined(DataChangeDetectionPolicy)) - { - if (DataChangeDetectionPolicy != null) - { - writer.WritePropertyName("dataChangeDetectionPolicy"u8); - writer.WriteObjectValue(DataChangeDetectionPolicy); - } - else - { - writer.WriteNull("dataChangeDetectionPolicy"); - } - } - if (Optional.IsDefined(DataDeletionDetectionPolicy)) - { - if (DataDeletionDetectionPolicy != null) - { - writer.WritePropertyName("dataDeletionDetectionPolicy"u8); - writer.WriteObjectValue(DataDeletionDetectionPolicy); - } - else - { - writer.WriteNull("dataDeletionDetectionPolicy"); - } - } - if (Optional.IsDefined(_etag)) - { - writer.WritePropertyName("@odata.etag"u8); - writer.WriteStringValue(_etag); - } - if (Optional.IsDefined(EncryptionKey)) - { - if (EncryptionKey != null) - { - writer.WritePropertyName("encryptionKey"u8); - writer.WriteObjectValue(EncryptionKey); - } - else - { - writer.WriteNull("encryptionKey"); - } - } - writer.WriteEndObject(); - } - - internal static SearchIndexerDataSourceConnection DeserializeSearchIndexerDataSourceConnection(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string description = default; - SearchIndexerDataSourceType type = default; - DataSourceCredentials credentials = default; - SearchIndexerDataContainer container = default; - SearchIndexerDataIdentity identity = default; - DataChangeDetectionPolicy dataChangeDetectionPolicy = default; - DataDeletionDetectionPolicy dataDeletionDetectionPolicy = default; - string odataEtag = default; - SearchResourceEncryptionKey encryptionKey = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("type"u8)) - { - type = new SearchIndexerDataSourceType(property.Value.GetString()); - continue; - } - if (property.NameEquals("credentials"u8)) - { - credentials = DataSourceCredentials.DeserializeDataSourceCredentials(property.Value); - continue; - } - if (property.NameEquals("container"u8)) - { - container = SearchIndexerDataContainer.DeserializeSearchIndexerDataContainer(property.Value); - continue; - } - if (property.NameEquals("identity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - identity = null; - continue; - } - identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - if 
(property.NameEquals("dataChangeDetectionPolicy"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - dataChangeDetectionPolicy = null; - continue; - } - dataChangeDetectionPolicy = DataChangeDetectionPolicy.DeserializeDataChangeDetectionPolicy(property.Value); - continue; - } - if (property.NameEquals("dataDeletionDetectionPolicy"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - dataDeletionDetectionPolicy = null; - continue; - } - dataDeletionDetectionPolicy = DataDeletionDetectionPolicy.DeserializeDataDeletionDetectionPolicy(property.Value); - continue; - } - if (property.NameEquals("@odata.etag"u8)) - { - odataEtag = property.Value.GetString(); - continue; - } - if (property.NameEquals("encryptionKey"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - encryptionKey = null; - continue; - } - encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value); - continue; - } - } - return new SearchIndexerDataSourceConnection( - name, - description, - type, - credentials, - container, - identity, - dataChangeDetectionPolicy, - dataDeletionDetectionPolicy, - odataEtag, - encryptionKey); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerDataSourceConnection FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerDataSourceConnection(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceConnection.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceConnection.cs deleted file mode 100644 index 675a4a69a7bf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceConnection.cs +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents a datasource definition, which can be used to configure an indexer. - public partial class SearchIndexerDataSourceConnection - { - /// Initializes a new instance of . - /// The name of the datasource. - /// The description of the datasource. - /// The type of the datasource. - /// Credentials for the datasource. - /// The data container for the datasource. - /// - /// An explicit managed identity to use for this datasource. If not specified and the connection string is a managed identity, the system-assigned managed identity is used. If not specified, the value remains unchanged. If "none" is specified, the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// - /// The data change detection policy for the datasource. - /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// - /// The data deletion detection policy for the datasource. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The ETag of the data source. - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your datasource definition when you want full assurance that no one, not even Microsoft, can decrypt your data source definition. Once you have encrypted your data source definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your datasource definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - internal SearchIndexerDataSourceConnection(string name, string description, SearchIndexerDataSourceType type, DataSourceCredentials credentialsInternal, SearchIndexerDataContainer container, SearchIndexerDataIdentity identity, DataChangeDetectionPolicy dataChangeDetectionPolicy, DataDeletionDetectionPolicy dataDeletionDetectionPolicy, string etag, SearchResourceEncryptionKey encryptionKey) - { - Name = name; - Description = description; - Type = type; - CredentialsInternal = credentialsInternal; - Container = container; - Identity = identity; - DataChangeDetectionPolicy = dataChangeDetectionPolicy; - DataDeletionDetectionPolicy = dataDeletionDetectionPolicy; - _etag = etag; - EncryptionKey = encryptionKey; - } - - /// The name of the datasource. - public string Name { get; set; } - /// The description of the datasource. - public string Description { get; set; } - /// The type of the datasource. - public SearchIndexerDataSourceType Type { get; set; } - /// The data container for the datasource. - public SearchIndexerDataContainer Container { get; set; } - /// - /// An explicit managed identity to use for this datasource. If not specified and the connection string is a managed identity, the system-assigned managed identity is used. If not specified, the value remains unchanged. If "none" is specified, the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity Identity { get; set; } - /// - /// The data change detection policy for the datasource. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public DataChangeDetectionPolicy DataChangeDetectionPolicy { get; set; } - /// - /// The data deletion detection policy for the datasource. - /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public DataDeletionDetectionPolicy DataDeletionDetectionPolicy { get; set; } - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your datasource definition when you want full assurance that no one, not even Microsoft, can decrypt your data source definition. Once you have encrypted your data source definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your datasource definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - public SearchResourceEncryptionKey EncryptionKey { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataUserAssignedIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataUserAssignedIdentity.Serialization.cs deleted file mode 100644 index 4b31e19f6776..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataUserAssignedIdentity.Serialization.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerDataUserAssignedIdentity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("userAssignedIdentity"u8); - writer.WriteStringValue(UserAssignedIdentity); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static SearchIndexerDataUserAssignedIdentity DeserializeSearchIndexerDataUserAssignedIdentity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - ResourceIdentifier userAssignedIdentity = default; - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("userAssignedIdentity"u8)) - { - userAssignedIdentity = new ResourceIdentifier(property.Value.GetString()); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new SearchIndexerDataUserAssignedIdentity(odataType, userAssignedIdentity); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SearchIndexerDataUserAssignedIdentity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerDataUserAssignedIdentity(document.RootElement); - } - - /// Convert into a . 
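
A minimal sketch of building the data source connection described above. It assumes the hand-written public constructor that takes a connection string in place of the internal DataSourceCredentials parameter, plus the SoftDeleteColumnDeletionDetectionPolicy type and the SearchIndexerClient.CreateOrUpdateDataSourceConnectionAsync method, none of which appear in this diff; the container variable comes from the earlier sketch.

using Azure.Search.Documents.Indexes.Models;

// Assumed public constructor; the generated one above is internal and takes DataSourceCredentials.
var dataSource = new SearchIndexerDataSourceConnection(
    name: "hotels-ds",
    type: SearchIndexerDataSourceType.CosmosDb,
    connectionString: "<cosmos-connection-string>",
    container: container)
{
    Description = "Hotels collection",
    DataDeletionDetectionPolicy = new SoftDeleteColumnDeletionDetectionPolicy
    {
        SoftDeleteColumnName = "isDeleted",
        SoftDeleteMarkerValue = "true"
    }
};

// With an Azure.Search.Documents.Indexes.SearchIndexerClient instance:
// await indexerClient.CreateOrUpdateDataSourceConnectionAsync(dataSource);
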
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataUserAssignedIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataUserAssignedIdentity.cs deleted file mode 100644 index e1d03e6a8fa3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataUserAssignedIdentity.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the identity for a datasource to use. - public partial class SearchIndexerDataUserAssignedIdentity : SearchIndexerDataIdentity - { - /// Initializes a new instance of . - /// The fully qualified Azure resource Id of a user assigned managed identity typically in the form "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId" that should have been assigned to the search service. - /// is null. - public SearchIndexerDataUserAssignedIdentity(ResourceIdentifier userAssignedIdentity) - { - Argument.AssertNotNull(userAssignedIdentity, nameof(userAssignedIdentity)); - - UserAssignedIdentity = userAssignedIdentity; - ODataType = "#Microsoft.Azure.Search.DataUserAssignedIdentity"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of identity. - /// The fully qualified Azure resource Id of a user assigned managed identity typically in the form "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId" that should have been assigned to the search service. - internal SearchIndexerDataUserAssignedIdentity(string oDataType, ResourceIdentifier userAssignedIdentity) : base(oDataType) - { - UserAssignedIdentity = userAssignedIdentity; - ODataType = oDataType ?? "#Microsoft.Azure.Search.DataUserAssignedIdentity"; - } - - /// The fully qualified Azure resource Id of a user assigned managed identity typically in the form "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId" that should have been assigned to the search service. - public ResourceIdentifier UserAssignedIdentity { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerError.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerError.Serialization.cs deleted file mode 100644 index 3b07d15dd27a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerError.Serialization.cs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
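
The user-assigned identity above takes an Azure.Core.ResourceIdentifier, as its public constructor shows. A minimal sketch; the subscription, resource group, and identity names are the illustrative values from the doc comment, not real resources:

using Azure.Core;
using Azure.Search.Documents.Indexes.Models;

// Fully qualified resource ID of a user-assigned managed identity assigned to the search service.
var identityResourceId = new ResourceIdentifier(
    "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg" +
    "/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId");

// Use this identity for outbound connections from the data source (from the earlier sketch).
dataSource.Identity = new SearchIndexerDataUserAssignedIdentity(identityResourceId);
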
- -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerError - { - internal static SearchIndexerError DeserializeSearchIndexerError(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string key = default; - string errorMessage = default; - int statusCode = default; - string name = default; - string details = default; - string documentationLink = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("key"u8)) - { - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("errorMessage"u8)) - { - errorMessage = property.Value.GetString(); - continue; - } - if (property.NameEquals("statusCode"u8)) - { - statusCode = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("details"u8)) - { - details = property.Value.GetString(); - continue; - } - if (property.NameEquals("documentationLink"u8)) - { - documentationLink = property.Value.GetString(); - continue; - } - } - return new SearchIndexerError( - key, - errorMessage, - statusCode, - name, - details, - documentationLink); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerError FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerError(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerError.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerError.cs deleted file mode 100644 index eb32cea6dde4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerError.cs +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents an item- or document-level indexing error. - public partial class SearchIndexerError - { - /// Initializes a new instance of . - /// The message describing the error that occurred while processing the item. - /// The status code indicating why the indexing operation failed. Possible values include: 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy. - internal SearchIndexerError(string errorMessage, int statusCode) - { - ErrorMessage = errorMessage; - StatusCode = statusCode; - } - - /// Initializes a new instance of . - /// The key of the item for which indexing failed. - /// The message describing the error that occurred while processing the item. - /// The status code indicating why the indexing operation failed. Possible values include: 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy. - /// The name of the source at which the error originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. - /// Additional, verbose details about the error to assist in debugging the indexer. This may not be always available. 
- /// A link to a troubleshooting guide for these classes of errors. This may not be always available. - internal SearchIndexerError(string key, string errorMessage, int statusCode, string name, string details, string documentationLink) - { - Key = key; - ErrorMessage = errorMessage; - StatusCode = statusCode; - Name = name; - Details = details; - DocumentationLink = documentationLink; - } - - /// The key of the item for which indexing failed. - public string Key { get; } - /// The message describing the error that occurred while processing the item. - public string ErrorMessage { get; } - /// The status code indicating why the indexing operation failed. Possible values include: 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy. - public int StatusCode { get; } - /// The name of the source at which the error originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. - public string Name { get; } - /// Additional, verbose details about the error to assist in debugging the indexer. This may not be always available. - public string Details { get; } - /// A link to a troubleshooting guide for these classes of errors. This may not be always available. - public string DocumentationLink { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjection.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjection.Serialization.cs deleted file mode 100644 index f3eec83fb1a2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjection.Serialization.cs +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
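
The error model above is read-only and surfaces through indexer execution status. A minimal sketch, assuming the SearchIndexerClient.GetIndexerStatusAsync method and the SearchIndexerStatus.LastResult.Errors shape, which are not part of this diff; endpoint and key are placeholders.

using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexerClient = new SearchIndexerClient(
    new Uri("https://<service>.search.windows.net"),
    new AzureKeyCredential("<admin-key>"));

SearchIndexerStatus status = await indexerClient.GetIndexerStatusAsync("hotels-indexer");

// LastResult is null until the indexer has run at least once.
foreach (SearchIndexerError error in status.LastResult.Errors)
{
    // Key, Name, Details, and DocumentationLink may be null, per the doc comments above.
    Console.WriteLine($"{error.StatusCode} {error.Key}: {error.ErrorMessage} ({error.DocumentationLink})");
}
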
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerIndexProjection : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("selectors"u8); - writer.WriteStartArray(); - foreach (var item in Selectors) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(Parameters)) - { - writer.WritePropertyName("parameters"u8); - writer.WriteObjectValue(Parameters); - } - writer.WriteEndObject(); - } - - internal static SearchIndexerIndexProjection DeserializeSearchIndexerIndexProjection(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList selectors = default; - SearchIndexerIndexProjectionsParameters parameters = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("selectors"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndexerIndexProjectionSelector.DeserializeSearchIndexerIndexProjectionSelector(item)); - } - selectors = array; - continue; - } - if (property.NameEquals("parameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - parameters = SearchIndexerIndexProjectionsParameters.DeserializeSearchIndexerIndexProjectionsParameters(property.Value); - continue; - } - } - return new SearchIndexerIndexProjection(selectors, parameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerIndexProjection FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerIndexProjection(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjection.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjection.cs deleted file mode 100644 index 3ce3c39f5fe1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjection.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Definition of additional projections to secondary search indexes. - public partial class SearchIndexerIndexProjection - { - /// Initializes a new instance of . - /// A list of projections to be performed to secondary search indexes. - /// is null. - public SearchIndexerIndexProjection(IEnumerable selectors) - { - Argument.AssertNotNull(selectors, nameof(selectors)); - - Selectors = selectors.ToList(); - } - - /// Initializes a new instance of . - /// A list of projections to be performed to secondary search indexes. - /// A dictionary of index projection-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. 
- internal SearchIndexerIndexProjection(IList selectors, SearchIndexerIndexProjectionsParameters parameters) - { - Selectors = selectors; - Parameters = parameters; - } - - /// A list of projections to be performed to secondary search indexes. - public IList Selectors { get; } - /// A dictionary of index projection-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. - public SearchIndexerIndexProjectionsParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionSelector.Serialization.cs deleted file mode 100644 index bccda23a9744..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionSelector.Serialization.cs +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerIndexProjectionSelector : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("targetIndexName"u8); - writer.WriteStringValue(TargetIndexName); - writer.WritePropertyName("parentKeyFieldName"u8); - writer.WriteStringValue(ParentKeyFieldName); - writer.WritePropertyName("sourceContext"u8); - writer.WriteStringValue(SourceContext); - writer.WritePropertyName("mappings"u8); - writer.WriteStartArray(); - foreach (var item in Mappings) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static SearchIndexerIndexProjectionSelector DeserializeSearchIndexerIndexProjectionSelector(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string targetIndexName = default; - string parentKeyFieldName = default; - string sourceContext = default; - IList mappings = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("targetIndexName"u8)) - { - targetIndexName = property.Value.GetString(); - continue; - } - if (property.NameEquals("parentKeyFieldName"u8)) - { - parentKeyFieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceContext"u8)) - { - sourceContext = property.Value.GetString(); - continue; - } - if (property.NameEquals("mappings"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - mappings = array; - continue; - } - } - return new SearchIndexerIndexProjectionSelector(targetIndexName, parentKeyFieldName, sourceContext, mappings); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerIndexProjectionSelector FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerIndexProjectionSelector(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionSelector.cs deleted file mode 100644 index 3feefca6507f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionSelector.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Description for what data to store in the designated search index. - public partial class SearchIndexerIndexProjectionSelector - { - /// Initializes a new instance of . - /// Name of the search index to project to. Must have a key field with the 'keyword' analyzer set. - /// Name of the field in the search index to map the parent document's key value to. Must be a string field that is filterable and not the key field. - /// Source context for the projections. Represents the cardinality at which the document will be split into multiple sub documents. - /// Mappings for the projection, or which source should be mapped to which field in the target index. - /// , , or is null. - public SearchIndexerIndexProjectionSelector(string targetIndexName, string parentKeyFieldName, string sourceContext, IEnumerable mappings) - { - Argument.AssertNotNull(targetIndexName, nameof(targetIndexName)); - Argument.AssertNotNull(parentKeyFieldName, nameof(parentKeyFieldName)); - Argument.AssertNotNull(sourceContext, nameof(sourceContext)); - Argument.AssertNotNull(mappings, nameof(mappings)); - - TargetIndexName = targetIndexName; - ParentKeyFieldName = parentKeyFieldName; - SourceContext = sourceContext; - Mappings = mappings.ToList(); - } - - /// Initializes a new instance of . - /// Name of the search index to project to. Must have a key field with the 'keyword' analyzer set. - /// Name of the field in the search index to map the parent document's key value to. Must be a string field that is filterable and not the key field. - /// Source context for the projections. Represents the cardinality at which the document will be split into multiple sub documents. - /// Mappings for the projection, or which source should be mapped to which field in the target index. - internal SearchIndexerIndexProjectionSelector(string targetIndexName, string parentKeyFieldName, string sourceContext, IList mappings) - { - TargetIndexName = targetIndexName; - ParentKeyFieldName = parentKeyFieldName; - SourceContext = sourceContext; - Mappings = mappings; - } - - /// Name of the search index to project to. Must have a key field with the 'keyword' analyzer set. - public string TargetIndexName { get; set; } - /// Name of the field in the search index to map the parent document's key value to. Must be a string field that is filterable and not the key field. - public string ParentKeyFieldName { get; set; } - /// Source context for the projections. Represents the cardinality at which the document will be split into multiple sub documents. - public string SourceContext { get; set; } - /// Mappings for the projection, or which source should be mapped to which field in the target index. 
- public IList Mappings { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionsParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionsParameters.Serialization.cs deleted file mode 100644 index ffa5a8543fe7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionsParameters.Serialization.cs +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerIndexProjectionsParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(ProjectionMode)) - { - writer.WritePropertyName("projectionMode"u8); - writer.WriteStringValue(ProjectionMode.Value.ToString()); - } - foreach (var item in AdditionalProperties) - { - writer.WritePropertyName(item.Key); - writer.WriteObjectValue(item.Value); - } - writer.WriteEndObject(); - } - - internal static SearchIndexerIndexProjectionsParameters DeserializeSearchIndexerIndexProjectionsParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IndexProjectionMode? projectionMode = default; - IDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("projectionMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - projectionMode = new IndexProjectionMode(property.Value.GetString()); - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new SearchIndexerIndexProjectionsParameters(projectionMode, additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerIndexProjectionsParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerIndexProjectionsParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionsParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionsParameters.cs deleted file mode 100644 index 01509771f65c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerIndexProjectionsParameters.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A dictionary of index projection-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. 
- public partial class SearchIndexerIndexProjectionsParameters - { - /// Initializes a new instance of . - public SearchIndexerIndexProjectionsParameters() - { - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// Defines behavior of the index projections in relation to the rest of the indexer. - /// Additional Properties. - internal SearchIndexerIndexProjectionsParameters(IndexProjectionMode? projectionMode, IDictionary additionalProperties) - { - ProjectionMode = projectionMode; - AdditionalProperties = additionalProperties; - } - - /// Defines behavior of the index projections in relation to the rest of the indexer. - public IndexProjectionMode? ProjectionMode { get; set; } - /// Additional Properties. - public IDictionary AdditionalProperties { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerKnowledgeStoreParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerKnowledgeStoreParameters.Serialization.cs deleted file mode 100644 index 615bebe72fdd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerKnowledgeStoreParameters.Serialization.cs +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerKnowledgeStoreParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(SynthesizeGeneratedKeyName)) - { - writer.WritePropertyName("synthesizeGeneratedKeyName"u8); - writer.WriteBooleanValue(SynthesizeGeneratedKeyName.Value); - } - foreach (var item in AdditionalProperties) - { - writer.WritePropertyName(item.Key); - writer.WriteObjectValue(item.Value); - } - writer.WriteEndObject(); - } - - internal static SearchIndexerKnowledgeStoreParameters DeserializeSearchIndexerKnowledgeStoreParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - bool? synthesizeGeneratedKeyName = default; - IDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("synthesizeGeneratedKeyName"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - synthesizeGeneratedKeyName = property.Value.GetBoolean(); - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new SearchIndexerKnowledgeStoreParameters(synthesizeGeneratedKeyName, additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerKnowledgeStoreParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerKnowledgeStoreParameters(document.RootElement); - } - - /// Convert into a . 
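
Putting the three index-projection types above together: a selector routes enriched content into a secondary index, the projection wraps one or more selectors, and the parameters object tunes behavior via ProjectionMode. A minimal sketch; index names, field names, and contexts are placeholders, the SkipIndexingParentDocuments value and the SearchIndexerSkillset.IndexProjection attach point are assumptions not shown in this diff.

using Azure.Search.Documents.Indexes.Models;

// Project each chunk under /document/pages/* into a secondary "hotels-chunks" index.
var selector = new SearchIndexerIndexProjectionSelector(
    targetIndexName: "hotels-chunks",
    parentKeyFieldName: "parent_id",      // filterable, non-key string field in the target index
    sourceContext: "/document/pages/*",
    mappings: new[]
    {
        new InputFieldMappingEntry("chunk") { Source = "/document/pages/*" },
        new InputFieldMappingEntry("title") { Source = "/document/title" }
    });

var projection = new SearchIndexerIndexProjection(new[] { selector })
{
    Parameters = new SearchIndexerIndexProjectionsParameters
    {
        // Assumed enum value; skips writing parent documents to the indexer's own target index.
        ProjectionMode = IndexProjectionMode.SkipIndexingParentDocuments
    }
};

// skillset.IndexProjection = projection;  // attach point on SearchIndexerSkillset is assumed
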
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerKnowledgeStoreParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerKnowledgeStoreParameters.cs deleted file mode 100644 index 6c30d9beacdd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerKnowledgeStoreParameters.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A dictionary of knowledge store-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. - public partial class SearchIndexerKnowledgeStoreParameters - { - /// Initializes a new instance of . - public SearchIndexerKnowledgeStoreParameters() - { - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// Whether or not projections should synthesize a generated key name if one isn't already present. - /// Additional Properties. - internal SearchIndexerKnowledgeStoreParameters(bool? synthesizeGeneratedKeyName, IDictionary additionalProperties) - { - SynthesizeGeneratedKeyName = synthesizeGeneratedKeyName; - AdditionalProperties = additionalProperties; - } - - /// Whether or not projections should synthesize a generated key name if one isn't already present. - public bool? SynthesizeGeneratedKeyName { get; set; } - /// Additional Properties. - public IDictionary AdditionalProperties { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerLimits.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerLimits.Serialization.cs deleted file mode 100644 index b27c89954b63..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerLimits.Serialization.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerLimits - { - internal static SearchIndexerLimits DeserializeSearchIndexerLimits(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - TimeSpan? maxRunTime = default; - long? maxDocumentExtractionSize = default; - long? 
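
The knowledge store parameters above follow the same open-ended pattern: one modeled property plus an AdditionalProperties bag for anything the service accepts but the SDK does not model. A minimal sketch; the attach point on SearchIndexerKnowledgeStore is an assumption, and the custom property name is a placeholder:

using Azure.Search.Documents.Indexes.Models;

// Synthesize a generated key name when projections do not already provide one.
var knowledgeStoreParameters = new SearchIndexerKnowledgeStoreParameters
{
    SynthesizeGeneratedKeyName = true
};

// Unmodeled service properties flow through the open-ended dictionary.
knowledgeStoreParameters.AdditionalProperties["<custom-property>"] = "<value>";

// knowledgeStore.Parameters = knowledgeStoreParameters;  // attach point assumed
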
maxDocumentContentCharactersToExtract = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxRunTime"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxRunTime = property.Value.GetTimeSpan("P"); - continue; - } - if (property.NameEquals("maxDocumentExtractionSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxDocumentExtractionSize = property.Value.GetInt64(); - continue; - } - if (property.NameEquals("maxDocumentContentCharactersToExtract"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxDocumentContentCharactersToExtract = property.Value.GetInt64(); - continue; - } - } - return new SearchIndexerLimits(maxRunTime, maxDocumentExtractionSize, maxDocumentContentCharactersToExtract); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerLimits FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerLimits(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerLimits.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerLimits.cs deleted file mode 100644 index 1cadd38c7662..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerLimits.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The SearchIndexerLimits. - public partial class SearchIndexerLimits - { - /// Initializes a new instance of . - internal SearchIndexerLimits() - { - } - - /// Initializes a new instance of . - /// The maximum duration that the indexer is permitted to run for one execution. - /// The maximum size of a document, in bytes, which will be considered valid for indexing. - /// The maximum number of characters that will be extracted from a document picked up for indexing. - internal SearchIndexerLimits(TimeSpan? maxRunTime, long? maxDocumentExtractionSize, long? maxDocumentContentCharactersToExtract) - { - MaxRunTime = maxRunTime; - MaxDocumentExtractionSize = maxDocumentExtractionSize; - MaxDocumentContentCharactersToExtract = maxDocumentContentCharactersToExtract; - } - - /// The maximum duration that the indexer is permitted to run for one execution. - public TimeSpan? MaxRunTime { get; } - /// The maximum size of a document, in bytes, which will be considered valid for indexing. - public long? MaxDocumentExtractionSize { get; } - /// The maximum number of characters that will be extracted from a document picked up for indexing. - public long? MaxDocumentContentCharactersToExtract { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkill.Serialization.cs deleted file mode 100644 index e70066d747d8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkill.Serialization.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
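
The limits model above is read-only and is returned alongside indexer status. A minimal sketch, assuming it is exposed as a Limits property on SearchIndexerStatus (not shown in this diff) and reusing the status variable from the earlier error-handling sketch:

using System;
using Azure.Search.Documents.Indexes.Models;

// Assumed property name; all three values may be null depending on the service tier.
SearchIndexerLimits limits = status.Limits;
Console.WriteLine($"Max run time: {limits.MaxRunTime}");
Console.WriteLine($"Max document size: {limits.MaxDocumentExtractionSize} bytes");
Console.WriteLine($"Max characters extracted: {limits.MaxDocumentContentCharactersToExtract}");
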
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static SearchIndexerSkill DeserializeSearchIndexerSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Skills.Custom.AmlSkill": return AzureMachineLearningSkill.DeserializeAzureMachineLearningSkill(element); - case "#Microsoft.Skills.Custom.WebApiSkill": return WebApiSkill.DeserializeWebApiSkill(element); - case "#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill": return AzureOpenAIEmbeddingSkill.DeserializeAzureOpenAIEmbeddingSkill(element); - case "#Microsoft.Skills.Text.CustomEntityLookupSkill": return CustomEntityLookupSkill.DeserializeCustomEntityLookupSkill(element); - case "#Microsoft.Skills.Text.EntityRecognitionSkill": return EntityRecognitionSkill.DeserializeEntityRecognitionSkill(element); - case "#Microsoft.Skills.Text.KeyPhraseExtractionSkill": return KeyPhraseExtractionSkill.DeserializeKeyPhraseExtractionSkill(element); - case "#Microsoft.Skills.Text.LanguageDetectionSkill": return LanguageDetectionSkill.DeserializeLanguageDetectionSkill(element); - case "#Microsoft.Skills.Text.MergeSkill": return MergeSkill.DeserializeMergeSkill(element); - case "#Microsoft.Skills.Text.PIIDetectionSkill": return PiiDetectionSkill.DeserializePiiDetectionSkill(element); - case "#Microsoft.Skills.Text.SentimentSkill": return SentimentSkill.DeserializeSentimentSkill(element); - case "#Microsoft.Skills.Text.SplitSkill": return SplitSkill.DeserializeSplitSkill(element); - case "#Microsoft.Skills.Text.TranslationSkill": return TextTranslationSkill.DeserializeTextTranslationSkill(element); - case "#Microsoft.Skills.Text.V3.EntityLinkingSkill": return EntityLinkingSkill.DeserializeEntityLinkingSkill(element); - case "#Microsoft.Skills.Text.V3.EntityRecognitionSkill": return EntityRecognitionSkillV3.DeserializeEntityRecognitionSkillV3(element); - case "#Microsoft.Skills.Text.V3.SentimentSkill": return SentimentSkillV3.DeserializeSentimentSkillV3(element); - case "#Microsoft.Skills.Util.ConditionalSkill": return ConditionalSkill.DeserializeConditionalSkill(element); - case "#Microsoft.Skills.Util.DocumentExtractionSkill": return DocumentExtractionSkill.DeserializeDocumentExtractionSkill(element); - case "#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill": return 
DocumentIntelligenceLayoutSkill.DeserializeDocumentIntelligenceLayoutSkill(element); - case "#Microsoft.Skills.Util.ShaperSkill": return ShaperSkill.DeserializeShaperSkill(element); - case "#Microsoft.Skills.Vision.ImageAnalysisSkill": return ImageAnalysisSkill.DeserializeImageAnalysisSkill(element); - case "#Microsoft.Skills.Vision.OcrSkill": return OcrSkill.DeserializeOcrSkill(element); - case "#Microsoft.Skills.Vision.VectorizeSkill": return VisionVectorizeSkill.DeserializeVisionVectorizeSkill(element); - } - } - return UnknownSearchIndexerSkill.DeserializeUnknownSearchIndexerSkill(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerSkill(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkill.cs deleted file mode 100644 index bf57982dd95d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkill.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for skills. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , and . - /// - public partial class SearchIndexerSkill - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - internal SearchIndexerSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs) - { - ODataType = oDataType; - Name = name; - Description = description; - Context = context; - Inputs = inputs; - Outputs = outputs; - } - - /// A URI fragment specifying the type of skill. - internal string ODataType { get; set; } - /// The description of the skill which describes the inputs, outputs, and usage of the skill. 
- public string Description { get; set; } - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - public string Context { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkillset.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkillset.Serialization.cs deleted file mode 100644 index f7e37d7bd4bd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkillset.Serialization.cs +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerSkillset : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WritePropertyName("skills"u8); - writer.WriteStartArray(); - foreach (var item in Skills) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(CognitiveServicesAccount)) - { - writer.WritePropertyName("cognitiveServices"u8); - writer.WriteObjectValue(CognitiveServicesAccount); - } - if (Optional.IsDefined(KnowledgeStore)) - { - writer.WritePropertyName("knowledgeStore"u8); - writer.WriteObjectValue(KnowledgeStore); - } - if (Optional.IsDefined(IndexProjection)) - { - writer.WritePropertyName("indexProjections"u8); - writer.WriteObjectValue(IndexProjection); - } - if (Optional.IsDefined(_etag)) - { - writer.WritePropertyName("@odata.etag"u8); - writer.WriteStringValue(_etag); - } - if (Optional.IsDefined(EncryptionKey)) - { - if (EncryptionKey != null) - { - writer.WritePropertyName("encryptionKey"u8); - writer.WriteObjectValue(EncryptionKey); - } - else - { - writer.WriteNull("encryptionKey"); - } - } - writer.WriteEndObject(); - } - - internal static SearchIndexerSkillset DeserializeSearchIndexerSkillset(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string description = default; - IList skills = default; - CognitiveServicesAccount cognitiveServices = default; - KnowledgeStore knowledgeStore = default; - SearchIndexerIndexProjection indexProjections = default; - string odataEtag = default; - SearchResourceEncryptionKey encryptionKey = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("skills"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SearchIndexerSkill.DeserializeSearchIndexerSkill(item)); - } - skills = array; - continue; - } - if (property.NameEquals("cognitiveServices"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - cognitiveServices = CognitiveServicesAccount.DeserializeCognitiveServicesAccount(property.Value); - continue; - } - if 
(property.NameEquals("knowledgeStore"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - knowledgeStore = KnowledgeStore.DeserializeKnowledgeStore(property.Value); - continue; - } - if (property.NameEquals("indexProjections"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - indexProjections = SearchIndexerIndexProjection.DeserializeSearchIndexerIndexProjection(property.Value); - continue; - } - if (property.NameEquals("@odata.etag"u8)) - { - odataEtag = property.Value.GetString(); - continue; - } - if (property.NameEquals("encryptionKey"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - encryptionKey = null; - continue; - } - encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value); - continue; - } - } - return new SearchIndexerSkillset( - name, - description, - skills, - cognitiveServices, - knowledgeStore, - indexProjections, - odataEtag, - encryptionKey); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerSkillset FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerSkillset(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkillset.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkillset.cs deleted file mode 100644 index 764d79fd01c7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerSkillset.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A list of skills. - public partial class SearchIndexerSkillset - { - /// Initializes a new instance of . - /// The name of the skillset. - /// - /// A list of skills in the skillset. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , and . - /// - /// or is null. - public SearchIndexerSkillset(string name, IEnumerable skills) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(skills, nameof(skills)); - - Name = name; - Skills = skills.ToList(); - } - - /// Initializes a new instance of . - /// The name of the skillset. - /// The description of the skillset. - /// - /// A list of skills in the skillset. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , and . - /// - /// - /// Details about the Azure AI service to be used when running skills. - /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - /// Definition of additional projections to Azure blob, table, or files, of enriched data. - /// Definition of additional projections to secondary search index(es). - /// The ETag of the skillset. - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your skillset definition when you want full assurance that no one, not even Microsoft, can decrypt your skillset definition. Once you have encrypted your skillset definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your skillset definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - internal SearchIndexerSkillset(string name, string description, IList skills, CognitiveServicesAccount cognitiveServicesAccount, KnowledgeStore knowledgeStore, SearchIndexerIndexProjection indexProjection, string etag, SearchResourceEncryptionKey encryptionKey) - { - Name = name; - Description = description; - Skills = skills; - CognitiveServicesAccount = cognitiveServicesAccount; - KnowledgeStore = knowledgeStore; - IndexProjection = indexProjection; - _etag = etag; - EncryptionKey = encryptionKey; - } - - /// The name of the skillset. - public string Name { get; set; } - /// The description of the skillset. - public string Description { get; set; } - /// - /// Details about the Azure AI service to be used when running skills. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - public CognitiveServicesAccount CognitiveServicesAccount { get; set; } - /// Definition of additional projections to Azure blob, table, or files, of enriched data. - public KnowledgeStore KnowledgeStore { get; set; } - /// Definition of additional projections to secondary search index(es). - public SearchIndexerIndexProjection IndexProjection { get; set; } - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your skillset definition when you want full assurance that no one, not even Microsoft, can decrypt your skillset definition. Once you have encrypted your skillset definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your skillset definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. 
- public SearchResourceEncryptionKey EncryptionKey { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerStatus.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerStatus.Serialization.cs deleted file mode 100644 index 65e6164e6f21..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerStatus.Serialization.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerStatus - { - internal static SearchIndexerStatus DeserializeSearchIndexerStatus(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IndexerStatus status = default; - IndexerExecutionResult lastResult = default; - IReadOnlyList executionHistory = default; - SearchIndexerLimits limits = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("status"u8)) - { - status = property.Value.GetString().ToIndexerStatus(); - continue; - } - if (property.NameEquals("lastResult"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - lastResult = null; - continue; - } - lastResult = IndexerExecutionResult.DeserializeIndexerExecutionResult(property.Value); - continue; - } - if (property.NameEquals("executionHistory"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(IndexerExecutionResult.DeserializeIndexerExecutionResult(item)); - } - executionHistory = array; - continue; - } - if (property.NameEquals("limits"u8)) - { - limits = SearchIndexerLimits.DeserializeSearchIndexerLimits(property.Value); - continue; - } - } - return new SearchIndexerStatus(status, lastResult, executionHistory, limits); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerStatus FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerStatus(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerStatus.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerStatus.cs deleted file mode 100644 index 3d58b1f940b8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerStatus.cs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents the current status and execution history of an indexer. - public partial class SearchIndexerStatus - { - /// Initializes a new instance of . - /// Overall indexer status. - /// History of the recent indexer executions, sorted in reverse chronological order. - /// The execution limits for the indexer. - internal SearchIndexerStatus(IndexerStatus status, IEnumerable executionHistory, SearchIndexerLimits limits) - { - Status = status; - ExecutionHistory = executionHistory.ToList(); - Limits = limits; - } - - /// Initializes a new instance of . - /// Overall indexer status. 
- /// The result of the most recent or an in-progress indexer execution. - /// History of the recent indexer executions, sorted in reverse chronological order. - /// The execution limits for the indexer. - internal SearchIndexerStatus(IndexerStatus status, IndexerExecutionResult lastResult, IReadOnlyList executionHistory, SearchIndexerLimits limits) - { - Status = status; - LastResult = lastResult; - ExecutionHistory = executionHistory; - Limits = limits; - } - - /// Overall indexer status. - public IndexerStatus Status { get; } - /// The result of the most recent or an in-progress indexer execution. - public IndexerExecutionResult LastResult { get; } - /// History of the recent indexer executions, sorted in reverse chronological order. - public IReadOnlyList ExecutionHistory { get; } - /// The execution limits for the indexer. - public SearchIndexerLimits Limits { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerWarning.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerWarning.Serialization.cs deleted file mode 100644 index d8ef1a1f79c7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerWarning.Serialization.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchIndexerWarning - { - internal static SearchIndexerWarning DeserializeSearchIndexerWarning(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string key = default; - string message = default; - string name = default; - string details = default; - string documentationLink = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("key"u8)) - { - key = property.Value.GetString(); - continue; - } - if (property.NameEquals("message"u8)) - { - message = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("details"u8)) - { - details = property.Value.GetString(); - continue; - } - if (property.NameEquals("documentationLink"u8)) - { - documentationLink = property.Value.GetString(); - continue; - } - } - return new SearchIndexerWarning(key, message, name, details, documentationLink); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchIndexerWarning FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchIndexerWarning(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerWarning.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerWarning.cs deleted file mode 100644 index d26b5f34e587..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerWarning.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents an item-level warning. - public partial class SearchIndexerWarning - { - /// Initializes a new instance of . 
- /// The message describing the warning that occurred while processing the item. - internal SearchIndexerWarning(string message) - { - Message = message; - } - - /// Initializes a new instance of . - /// The key of the item which generated a warning. - /// The message describing the warning that occurred while processing the item. - /// The name of the source at which the warning originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. - /// Additional, verbose details about the warning to assist in debugging the indexer. This may not be always available. - /// A link to a troubleshooting guide for these classes of warnings. This may not be always available. - internal SearchIndexerWarning(string key, string message, string name, string details, string documentationLink) - { - Key = key; - Message = message; - Name = name; - Details = details; - DocumentationLink = documentationLink; - } - - /// The key of the item which generated a warning. - public string Key { get; } - /// The message describing the warning that occurred while processing the item. - public string Message { get; } - /// The name of the source at which the warning originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. - public string Name { get; } - /// Additional, verbose details about the warning to assist in debugging the indexer. This may not be always available. - public string Details { get; } - /// A link to a troubleshooting guide for these classes of warnings. This may not be always available. - public string DocumentationLink { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchMode.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchMode.Serialization.cs deleted file mode 100644 index d9ca59cbfe57..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchMode.Serialization.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Models -{ - internal static partial class SearchModeExtensions - { - public static string ToSerialString(this SearchMode value) => value switch - { - SearchMode.Any => "any", - SearchMode.All => "all", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown SearchMode value.") - }; - - public static SearchMode ToSearchMode(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "any")) return SearchMode.Any; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "all")) return SearchMode.All; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown SearchMode value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchOptions.cs deleted file mode 100644 index 658a7d2f4873..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchOptions.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents -{ - /// Parameters for filtering, sorting, faceting, paging, and other search query behaviors. 
- public partial class SearchOptions - { - /// Initializes a new instance of . - public SearchOptions() - { - Facets = new ChangeTrackingList(); - ScoringParameters = new ChangeTrackingList(); - VectorQueries = new ChangeTrackingList(); - } - /// A string tag that is appended to hit highlights. Must be set with highlightPreTag. Default is </em>. - public string HighlightPostTag { get; set; } - /// A string tag that is prepended to hit highlights. Must be set with highlightPostTag. Default is <em>. - public string HighlightPreTag { get; set; } - /// A number between 0 and 100 indicating the percentage of the index that must be covered by a search query in order for the query to be reported as a success. This parameter can be useful for ensuring search availability even for services with only one replica. The default is 100. - public double? MinimumCoverage { get; set; } - /// A value that specifies the syntax of the search query. The default is 'simple'. Use 'full' if your query uses the Lucene query syntax. - public SearchQueryType? QueryType { get; set; } - /// A value that specifies whether we want to calculate scoring statistics (such as document frequency) globally for more consistent scoring, or locally, for lower latency. The default is 'local'. Use 'global' to aggregate scoring statistics globally before scoring. Using global scoring statistics can increase latency of search queries. - public ScoringStatistics? ScoringStatistics { get; set; } - /// A value to be used to create a sticky session, which can help getting more consistent results. As long as the same sessionId is used, a best-effort attempt will be made to target the same replica set. Be wary that reusing the same sessionID values repeatedly can interfere with the load balancing of the requests across replicas and adversely affect the performance of the search service. The value used as sessionId cannot start with a '_' character. - public string SessionId { get; set; } - /// The name of a scoring profile to evaluate match scores for matching documents in order to sort the results. - public string ScoringProfile { get; set; } - /// A value that specifies whether any or all of the search terms must be matched in order to count the document as a match. - public SearchMode? SearchMode { get; set; } - /// The number of search results to skip. This value cannot be greater than 100,000. If you need to scan documents in sequence, but cannot use skip due to this limitation, consider using orderby on a totally-ordered key and filter with a range query instead. - public int? Skip { get; set; } - /// The query parameters to configure hybrid search behaviors. - public HybridSearch HybridSearch { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceCounter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceCounter.Serialization.cs deleted file mode 100644 index 357a80dc776e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceCounter.Serialization.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
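The SearchOptions members deleted above (query type, search mode, scoring statistics, session id, skip, and so on) are the per-request knobs passed to a search call. Below is a minimal sketch of setting a few of them, assuming a placeholder endpoint, index name, key, query text, and field name, and assuming SearchClient.Search as the consuming method (not part of this diff); it is illustrative only, not part of this change.

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

class SearchOptionsSketch
{
    static void Main()
    {
        // Placeholder endpoint, index, and key.
        var client = new SearchClient(
            new Uri("https://<search-service>.search.windows.net"),
            "<index-name>",
            new AzureKeyCredential("<query-api-key>"));

        var options = new SearchOptions
        {
            QueryType = SearchQueryType.Full,         // use Lucene syntax
            SearchMode = SearchMode.All,              // all terms must match
            ScoringStatistics = ScoringStatistics.Global,
            SessionId = "session-42",                 // sticky session; must not start with '_'
            Skip = 10,                                // service caps this at 100,000
        };

        SearchResults<SearchDocument> results = client.Search<SearchDocument>("ocean view", options);
        foreach (SearchResult<SearchDocument> result in results.GetResults())
        {
            // "hotelName" is a placeholder field name for this sketch.
            Console.WriteLine($"{result.Score}: {result.Document["hotelName"]}");
        }
    }
}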
- -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchResourceCounter - { - internal static SearchResourceCounter DeserializeSearchResourceCounter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - long usage = default; - long? quota = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("usage"u8)) - { - usage = property.Value.GetInt64(); - continue; - } - if (property.NameEquals("quota"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - quota = null; - continue; - } - quota = property.Value.GetInt64(); - continue; - } - } - return new SearchResourceCounter(usage, quota); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchResourceCounter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchResourceCounter(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceCounter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceCounter.cs deleted file mode 100644 index 07134d24e83f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceCounter.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents a resource's usage and quota. - public partial class SearchResourceCounter - { - /// Initializes a new instance of . - /// The resource usage amount. - internal SearchResourceCounter(long usage) - { - Usage = usage; - } - - /// Initializes a new instance of . - /// The resource usage amount. - /// The resource amount quota. - internal SearchResourceCounter(long usage, long? quota) - { - Usage = usage; - Quota = quota; - } - - /// The resource usage amount. - public long Usage { get; } - /// The resource amount quota. - public long? Quota { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceEncryptionKey.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceEncryptionKey.Serialization.cs deleted file mode 100644 index 97ca75c316f9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceEncryptionKey.Serialization.cs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchResourceEncryptionKey : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("keyVaultKeyName"u8); - writer.WriteStringValue(KeyName); - writer.WritePropertyName("keyVaultKeyVersion"u8); - writer.WriteStringValue(KeyVersion); - writer.WritePropertyName("keyVaultUri"u8); - writer.WriteStringValue(_vaultUri); - if (Optional.IsDefined(AccessCredentialsInternal)) - { - writer.WritePropertyName("accessCredentials"u8); - writer.WriteObjectValue(AccessCredentialsInternal); - } - if (Optional.IsDefined(Identity)) - { - if (Identity != null) - { - writer.WritePropertyName("identity"u8); - writer.WriteObjectValue(Identity); - } - else - { - writer.WriteNull("identity"); - } - } - writer.WriteEndObject(); - } - - internal static SearchResourceEncryptionKey DeserializeSearchResourceEncryptionKey(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string keyVaultKeyName = default; - string keyVaultKeyVersion = default; - string keyVaultUri = default; - AzureActiveDirectoryApplicationCredentials accessCredentials = default; - SearchIndexerDataIdentity identity = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("keyVaultKeyName"u8)) - { - keyVaultKeyName = property.Value.GetString(); - continue; - } - if (property.NameEquals("keyVaultKeyVersion"u8)) - { - keyVaultKeyVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("keyVaultUri"u8)) - { - keyVaultUri = property.Value.GetString(); - continue; - } - if (property.NameEquals("accessCredentials"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - accessCredentials = AzureActiveDirectoryApplicationCredentials.DeserializeAzureActiveDirectoryApplicationCredentials(property.Value); - continue; - } - if (property.NameEquals("identity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - identity = null; - continue; - } - identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - } - return new SearchResourceEncryptionKey(keyVaultKeyName, keyVaultKeyVersion, keyVaultUri, accessCredentials, identity); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchResourceEncryptionKey FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchResourceEncryptionKey(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceEncryptionKey.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceEncryptionKey.cs deleted file mode 100644 index bbb9e6c17fa1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResourceEncryptionKey.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A customer-managed encryption key in Azure Key Vault. Keys that you create and manage can be used to encrypt or decrypt data-at-rest, such as indexes and synonym maps. - public partial class SearchResourceEncryptionKey - { - /// Initializes a new instance of . - /// The name of your Azure Key Vault key to be used to encrypt your data at rest. - /// The version of your Azure Key Vault key to be used to encrypt your data at rest. - /// The URI of your Azure Key Vault, also referred to as DNS name, that contains the key to be used to encrypt your data at rest. An example URI might be `https://my-keyvault-name.vault.azure.net`. - /// Optional Azure Active Directory credentials used for accessing your Azure Key Vault. Not required if using managed identity instead. - /// - /// An explicit managed identity to use for this encryption key. If not specified and the access credentials property is null, the system-assigned managed identity is used. On update to the resource, if the explicit identity is unspecified, it remains unchanged. If "none" is specified, the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - internal SearchResourceEncryptionKey(string keyName, string keyVersion, string vaultUri, AzureActiveDirectoryApplicationCredentials accessCredentialsInternal, SearchIndexerDataIdentity identity) - { - KeyName = keyName; - KeyVersion = keyVersion; - _vaultUri = vaultUri; - AccessCredentialsInternal = accessCredentialsInternal; - Identity = identity; - } - /// - /// An explicit managed identity to use for this encryption key. If not specified and the access credentials property is null, the system-assigned managed identity is used. On update to the resource, if the explicit identity is unspecified, it remains unchanged. If "none" is specified, the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity Identity { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResult.Serialization.cs deleted file mode 100644 index ef439e06e95e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResult.Serialization.cs +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - internal partial class SearchResult - { - internal static SearchResult DeserializeSearchResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double searchScore = default; - double? 
searchRerankerScore = default; - IReadOnlyDictionary> searchHighlights = default; - IReadOnlyList searchCaptions = default; - DocumentDebugInfo searchDocumentDebugInfo = default; - IReadOnlyDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@search.score"u8)) - { - searchScore = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("@search.rerankerScore"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - searchRerankerScore = null; - continue; - } - searchRerankerScore = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("@search.highlights"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - Dictionary> dictionary = new Dictionary>(); - foreach (var property0 in property.Value.EnumerateObject()) - { - if (property0.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(property0.Name, null); - } - else - { - List array = new List(); - foreach (var item in property0.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - dictionary.Add(property0.Name, array); - } - } - searchHighlights = dictionary; - continue; - } - if (property.NameEquals("@search.captions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - searchCaptions = null; - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(QueryCaptionResult.DeserializeQueryCaptionResult(item)); - } - searchCaptions = array; - continue; - } - if (property.NameEquals("@search.documentDebugInfo"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - searchDocumentDebugInfo = null; - continue; - } - searchDocumentDebugInfo = DocumentDebugInfo.DeserializeDocumentDebugInfo(property.Value); - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new SearchResult( - searchScore, - searchRerankerScore, - searchHighlights ?? new ChangeTrackingDictionary>(), - searchCaptions ?? new ChangeTrackingList(), - searchDocumentDebugInfo, - additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResult.cs deleted file mode 100644 index dc3fd651496e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchResult.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// Contains a document found by a search query, plus associated metadata. - internal partial class SearchResult - { - /// Initializes a new instance of . - /// The relevance score of the document compared to other documents returned by the query. 
- internal SearchResult(double score) - { - Score = score; - Highlights = new ChangeTrackingDictionary>(); - Captions = new ChangeTrackingList(); - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// The relevance score of the document compared to other documents returned by the query. - /// The relevance score computed by the semantic ranker for the top search results. Search results are sorted by the RerankerScore first and then by the Score. RerankerScore is only returned for queries of type 'semantic'. - /// Text fragments from the document that indicate the matching search terms, organized by each applicable field; null if hit highlighting was not enabled for the query. - /// Captions are the most representative passages from the document relatively to the search query. They are often used as document summary. Captions are only returned for queries of type 'semantic'. - /// Contains debugging information that can be used to further explore your search results. - /// Additional Properties. - internal SearchResult(double score, double? rerankerScore, IReadOnlyDictionary> highlights, IReadOnlyList captions, DocumentDebugInfo documentDebugInfo, IReadOnlyDictionary additionalProperties) - { - Score = score; - RerankerScore = rerankerScore; - Highlights = highlights; - Captions = captions; - DocumentDebugInfo = documentDebugInfo; - AdditionalProperties = additionalProperties; - } - - /// The relevance score of the document compared to other documents returned by the query. - public double Score { get; } - /// The relevance score computed by the semantic ranker for the top search results. Search results are sorted by the RerankerScore first and then by the Score. RerankerScore is only returned for queries of type 'semantic'. - public double? RerankerScore { get; } - /// Text fragments from the document that indicate the matching search terms, organized by each applicable field; null if hit highlighting was not enabled for the query. - public IReadOnlyDictionary> Highlights { get; } - /// Captions are the most representative passages from the document relatively to the search query. They are often used as document summary. Captions are only returned for queries of type 'semantic'. - public IReadOnlyList Captions { get; } - /// Contains debugging information that can be used to further explore your search results. - public DocumentDebugInfo DocumentDebugInfo { get; } - /// Additional Properties. - public IReadOnlyDictionary AdditionalProperties { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchScoreThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchScoreThreshold.Serialization.cs deleted file mode 100644 index a64d6a234c91..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchScoreThreshold.Serialization.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class SearchScoreThreshold : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("value"u8); - writer.WriteNumberValue(Value); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static SearchScoreThreshold DeserializeSearchScoreThreshold(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double value = default; - VectorThresholdKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - value = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorThresholdKind(property.Value.GetString()); - continue; - } - } - return new SearchScoreThreshold(kind, value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SearchScoreThreshold FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchScoreThreshold(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchScoreThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchScoreThreshold.cs deleted file mode 100644 index c4187e5820ae..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchScoreThreshold.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The results of the vector query will filter based on the '@search.score' value. Note this is the @search.score returned as part of the search response. The threshold direction will be chosen for higher @search.score. - public partial class SearchScoreThreshold : VectorThreshold - { - /// Initializes a new instance of . - /// The threshold will filter based on the '@search.score' value. Note this is the @search.score returned as part of the search response. The threshold direction will be chosen for higher @search.score. - public SearchScoreThreshold(double value) - { - Value = value; - Kind = VectorThresholdKind.SearchScore; - } - - /// Initializes a new instance of . - /// The kind of threshold used to filter vector queries. - /// The threshold will filter based on the '@search.score' value. Note this is the @search.score returned as part of the search response. The threshold direction will be chosen for higher @search.score. - internal SearchScoreThreshold(VectorThresholdKind kind, double value) : base(kind) - { - Value = value; - Kind = kind; - } - - /// The threshold will filter based on the '@search.score' value. Note this is the @search.score returned as part of the search response. The threshold direction will be chosen for higher @search.score. 
- public double Value { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceCounters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceCounters.Serialization.cs deleted file mode 100644 index 0ce21b6def7a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceCounters.Serialization.cs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchServiceCounters - { - internal static SearchServiceCounters DeserializeSearchServiceCounters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SearchResourceCounter aliasesCount = default; - SearchResourceCounter documentCount = default; - SearchResourceCounter indexesCount = default; - SearchResourceCounter indexersCount = default; - SearchResourceCounter dataSourcesCount = default; - SearchResourceCounter storageSize = default; - SearchResourceCounter synonymMaps = default; - SearchResourceCounter skillsetCount = default; - SearchResourceCounter vectorIndexSize = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("aliasesCount"u8)) - { - aliasesCount = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("documentCount"u8)) - { - documentCount = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("indexesCount"u8)) - { - indexesCount = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("indexersCount"u8)) - { - indexersCount = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("dataSourcesCount"u8)) - { - dataSourcesCount = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("storageSize"u8)) - { - storageSize = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("synonymMaps"u8)) - { - synonymMaps = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("skillsetCount"u8)) - { - skillsetCount = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - if (property.NameEquals("vectorIndexSize"u8)) - { - vectorIndexSize = SearchResourceCounter.DeserializeSearchResourceCounter(property.Value); - continue; - } - } - return new SearchServiceCounters( - aliasesCount, - documentCount, - indexesCount, - indexersCount, - dataSourcesCount, - storageSize, - synonymMaps, - skillsetCount, - vectorIndexSize); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static SearchServiceCounters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchServiceCounters(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceCounters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceCounters.cs deleted file mode 100644 index c63049576a30..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceCounters.cs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents service-level resource counters and quotas. - public partial class SearchServiceCounters - { - /// Initializes a new instance of . - /// Total number of aliases. - /// Total number of documents across all indexes in the service. - /// Total number of indexes. - /// Total number of indexers. - /// Total number of data sources. - /// Total size of used storage in bytes. - /// Total number of synonym maps. - /// Total number of skillsets. - /// Total memory consumption of all vector indexes within the service, in bytes. - /// , , , , , , , or is null. - internal SearchServiceCounters(SearchResourceCounter aliasCounter, SearchResourceCounter documentCounter, SearchResourceCounter indexCounter, SearchResourceCounter indexerCounter, SearchResourceCounter dataSourceCounter, SearchResourceCounter storageSizeCounter, SearchResourceCounter synonymMapCounter, SearchResourceCounter skillsetCounter, SearchResourceCounter vectorIndexSizeCounter) - { - Argument.AssertNotNull(aliasCounter, nameof(aliasCounter)); - Argument.AssertNotNull(documentCounter, nameof(documentCounter)); - Argument.AssertNotNull(indexCounter, nameof(indexCounter)); - Argument.AssertNotNull(indexerCounter, nameof(indexerCounter)); - Argument.AssertNotNull(dataSourceCounter, nameof(dataSourceCounter)); - Argument.AssertNotNull(storageSizeCounter, nameof(storageSizeCounter)); - Argument.AssertNotNull(synonymMapCounter, nameof(synonymMapCounter)); - Argument.AssertNotNull(skillsetCounter, nameof(skillsetCounter)); - Argument.AssertNotNull(vectorIndexSizeCounter, nameof(vectorIndexSizeCounter)); - - AliasCounter = aliasCounter; - DocumentCounter = documentCounter; - IndexCounter = indexCounter; - IndexerCounter = indexerCounter; - DataSourceCounter = dataSourceCounter; - StorageSizeCounter = storageSizeCounter; - SynonymMapCounter = synonymMapCounter; - SkillsetCounter = skillsetCounter; - VectorIndexSizeCounter = vectorIndexSizeCounter; - } - - /// Total number of aliases. - public SearchResourceCounter AliasCounter { get; } - /// Total number of documents across all indexes in the service. - public SearchResourceCounter DocumentCounter { get; } - /// Total number of indexes. - public SearchResourceCounter IndexCounter { get; } - /// Total number of indexers. - public SearchResourceCounter IndexerCounter { get; } - /// Total number of data sources. - public SearchResourceCounter DataSourceCounter { get; } - /// Total size of used storage in bytes. - public SearchResourceCounter StorageSizeCounter { get; } - /// Total number of synonym maps. - public SearchResourceCounter SynonymMapCounter { get; } - /// Total number of skillsets. 
- public SearchResourceCounter SkillsetCounter { get; } - /// Total memory consumption of all vector indexes within the service, in bytes. - public SearchResourceCounter VectorIndexSizeCounter { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceLimits.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceLimits.Serialization.cs deleted file mode 100644 index b36a4c809482..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceLimits.Serialization.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchServiceLimits - { - internal static SearchServiceLimits DeserializeSearchServiceLimits(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxFieldsPerIndex = default; - int? maxFieldNestingDepthPerIndex = default; - int? maxComplexCollectionFieldsPerIndex = default; - int? maxComplexObjectsInCollectionsPerDocument = default; - long? maxStoragePerIndex = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxFieldsPerIndex"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxFieldsPerIndex = null; - continue; - } - maxFieldsPerIndex = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxFieldNestingDepthPerIndex"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxFieldNestingDepthPerIndex = null; - continue; - } - maxFieldNestingDepthPerIndex = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxComplexCollectionFieldsPerIndex"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxComplexCollectionFieldsPerIndex = null; - continue; - } - maxComplexCollectionFieldsPerIndex = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxComplexObjectsInCollectionsPerDocument"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxComplexObjectsInCollectionsPerDocument = null; - continue; - } - maxComplexObjectsInCollectionsPerDocument = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maxStoragePerIndex"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maxStoragePerIndex = null; - continue; - } - maxStoragePerIndex = property.Value.GetInt64(); - continue; - } - } - return new SearchServiceLimits(maxFieldsPerIndex, maxFieldNestingDepthPerIndex, maxComplexCollectionFieldsPerIndex, maxComplexObjectsInCollectionsPerDocument, maxStoragePerIndex); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SearchServiceLimits FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchServiceLimits(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceLimits.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceLimits.cs deleted file mode 100644 index 842c21705ab0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceLimits.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
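The SearchServiceCounters model deleted above is composed of SearchResourceCounter values (each a Usage plus optional Quota) and is returned, together with the SearchServiceLimits model that follows, as SearchServiceStatistics. A minimal reading sketch is below, assuming a placeholder endpoint and key and assuming SearchIndexClient.GetServiceStatistics (not part of this diff) as the usual entry point; it is illustrative only, not part of this change.

using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

class ServiceStatisticsSketch
{
    static void Main()
    {
        // Placeholder endpoint and key.
        var client = new SearchIndexClient(
            new Uri("https://<search-service>.search.windows.net"),
            new AzureKeyCredential("<admin-api-key>"));

        SearchServiceStatistics stats = client.GetServiceStatistics();

        // Each counter exposes Usage and an optional Quota, as in the model above.
        SearchResourceCounter documents = stats.Counters.DocumentCounter;
        Console.WriteLine($"Documents: {documents.Usage} / {documents.Quota?.ToString() ?? "no quota"}");
        Console.WriteLine($"Vector index size (bytes): {stats.Counters.VectorIndexSizeCounter.Usage}");
    }
}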
-// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents various service level limits. - public partial class SearchServiceLimits - { - /// Initializes a new instance of . - internal SearchServiceLimits() - { - } - - /// Initializes a new instance of . - /// The maximum allowed fields per index. - /// The maximum depth which you can nest sub-fields in an index, including the top-level complex field. For example, a/b/c has a nesting depth of 3. - /// The maximum number of fields of type Collection(Edm.ComplexType) allowed in an index. - /// The maximum number of objects in complex collections allowed per document. - /// The maximum amount of storage in bytes allowed per index. - internal SearchServiceLimits(int? maxFieldsPerIndex, int? maxFieldNestingDepthPerIndex, int? maxComplexCollectionFieldsPerIndex, int? maxComplexObjectsInCollectionsPerDocument, long? maxStoragePerIndexInBytes) - { - MaxFieldsPerIndex = maxFieldsPerIndex; - MaxFieldNestingDepthPerIndex = maxFieldNestingDepthPerIndex; - MaxComplexCollectionFieldsPerIndex = maxComplexCollectionFieldsPerIndex; - MaxComplexObjectsInCollectionsPerDocument = maxComplexObjectsInCollectionsPerDocument; - MaxStoragePerIndexInBytes = maxStoragePerIndexInBytes; - } - - /// The maximum allowed fields per index. - public int? MaxFieldsPerIndex { get; } - /// The maximum depth which you can nest sub-fields in an index, including the top-level complex field. For example, a/b/c has a nesting depth of 3. - public int? MaxFieldNestingDepthPerIndex { get; } - /// The maximum number of fields of type Collection(Edm.ComplexType) allowed in an index. - public int? MaxComplexCollectionFieldsPerIndex { get; } - /// The maximum number of objects in complex collections allowed per document. - public int? MaxComplexObjectsInCollectionsPerDocument { get; } - /// The maximum amount of storage in bytes allowed per index. - public long? MaxStoragePerIndexInBytes { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceStatistics.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceStatistics.Serialization.cs deleted file mode 100644 index ac0dde12f064..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceStatistics.Serialization.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchServiceStatistics - { - internal static SearchServiceStatistics DeserializeSearchServiceStatistics(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SearchServiceCounters counters = default; - SearchServiceLimits limits = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("counters"u8)) - { - counters = SearchServiceCounters.DeserializeSearchServiceCounters(property.Value); - continue; - } - if (property.NameEquals("limits"u8)) - { - limits = SearchServiceLimits.DeserializeSearchServiceLimits(property.Value); - continue; - } - } - return new SearchServiceStatistics(counters, limits); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static SearchServiceStatistics FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchServiceStatistics(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceStatistics.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceStatistics.cs deleted file mode 100644 index efd1ac4579d3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchServiceStatistics.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Response from a get service statistics request. If successful, it includes service level counters and limits. - public partial class SearchServiceStatistics - { - /// Initializes a new instance of . - /// Service level resource counters. - /// Service level general limits. - /// or is null. - internal SearchServiceStatistics(SearchServiceCounters counters, SearchServiceLimits limits) - { - Argument.AssertNotNull(counters, nameof(counters)); - Argument.AssertNotNull(limits, nameof(limits)); - - Counters = counters; - Limits = limits; - } - - /// Service level resource counters. - public SearchServiceCounters Counters { get; } - /// Service level general limits. - public SearchServiceLimits Limits { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchSuggester.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchSuggester.Serialization.cs deleted file mode 100644 index 2e7603718715..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchSuggester.Serialization.cs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SearchSuggester : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("searchMode"u8); - writer.WriteStringValue(SearchMode); - writer.WritePropertyName("sourceFields"u8); - writer.WriteStartArray(); - foreach (var item in SourceFields) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static SearchSuggester DeserializeSearchSuggester(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string searchMode = default; - IList sourceFields = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("searchMode"u8)) - { - searchMode = property.Value.GetString(); - continue; - } - if (property.NameEquals("sourceFields"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - sourceFields = array; - continue; - } - } - return new SearchSuggester(name, searchMode, sourceFields); - } - - /// Deserializes the model from a raw response. 
- /// The response to deserialize the model from. - internal static SearchSuggester FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSearchSuggester(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchSuggester.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchSuggester.cs deleted file mode 100644 index 88c8806cea54..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchSuggester.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines how the Suggest API should apply to a group of fields in the index. - public partial class SearchSuggester - { - /// Initializes a new instance of . - /// The name of the suggester. - /// A value indicating the capabilities of the suggester. - /// The list of field names to which the suggester applies. Each field must be searchable. - internal SearchSuggester(string name, string searchMode, IList sourceFields) - { - Name = name; - SearchMode = searchMode; - SourceFields = sourceFields; - } - - /// The name of the suggester. - public string Name { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticConfiguration.Serialization.cs deleted file mode 100644 index d45f9b7d5886..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticConfiguration.Serialization.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SemanticConfiguration : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("prioritizedFields"u8); - writer.WriteObjectValue(PrioritizedFields); - writer.WriteEndObject(); - } - - internal static SemanticConfiguration DeserializeSemanticConfiguration(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - SemanticPrioritizedFields prioritizedFields = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("prioritizedFields"u8)) - { - prioritizedFields = SemanticPrioritizedFields.DeserializeSemanticPrioritizedFields(property.Value); - continue; - } - } - return new SemanticConfiguration(name, prioritizedFields); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static SemanticConfiguration FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSemanticConfiguration(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticConfiguration.cs deleted file mode 100644 index 6d38d73c985a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticConfiguration.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a specific configuration to be used in the context of semantic capabilities. - public partial class SemanticConfiguration - { - /// Initializes a new instance of . - /// The name of the semantic configuration. - /// Describes the title, content, and keyword fields to be used for semantic ranking, captions, highlights, and answers. At least one of the three sub properties (titleField, prioritizedKeywordsFields and prioritizedContentFields) need to be set. - /// or is null. - public SemanticConfiguration(string name, SemanticPrioritizedFields prioritizedFields) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(prioritizedFields, nameof(prioritizedFields)); - - Name = name; - PrioritizedFields = prioritizedFields; - } - - /// The name of the semantic configuration. - public string Name { get; set; } - /// Describes the title, content, and keyword fields to be used for semantic ranking, captions, highlights, and answers. At least one of the three sub properties (titleField, prioritizedKeywordsFields and prioritizedContentFields) need to be set. - public SemanticPrioritizedFields PrioritizedFields { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticDebugInfo.Serialization.cs deleted file mode 100644 index c536dd1ff547..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticDebugInfo.Serialization.cs +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class SemanticDebugInfo - { - internal static SemanticDebugInfo DeserializeSemanticDebugInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - QueryResultDocumentSemanticField titleField = default; - IReadOnlyList contentFields = default; - IReadOnlyList keywordFields = default; - QueryResultDocumentRerankerInput rerankerInput = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("titleField"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - titleField = QueryResultDocumentSemanticField.DeserializeQueryResultDocumentSemanticField(property.Value); - continue; - } - if (property.NameEquals("contentFields"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(QueryResultDocumentSemanticField.DeserializeQueryResultDocumentSemanticField(item)); - } - contentFields = array; - continue; - } - if (property.NameEquals("keywordFields"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(QueryResultDocumentSemanticField.DeserializeQueryResultDocumentSemanticField(item)); - } - keywordFields = array; - continue; - } - if (property.NameEquals("rerankerInput"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - rerankerInput = QueryResultDocumentRerankerInput.DeserializeQueryResultDocumentRerankerInput(property.Value); - continue; - } - } - return new SemanticDebugInfo(titleField, contentFields ?? new ChangeTrackingList(), keywordFields ?? new ChangeTrackingList(), rerankerInput); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SemanticDebugInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSemanticDebugInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticDebugInfo.cs deleted file mode 100644 index 6f15066b9bbe..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticDebugInfo.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// The SemanticDebugInfo. - public partial class SemanticDebugInfo - { - /// Initializes a new instance of . - internal SemanticDebugInfo() - { - ContentFields = new ChangeTrackingList(); - KeywordFields = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// The title field that was sent to the semantic enrichment process, as well as how it was used. - /// The content fields that were sent to the semantic enrichment process, as well as how they were used. - /// The keyword fields that were sent to the semantic enrichment process, as well as how they were used. - /// The raw concatenated strings that were sent to the semantic enrichment process. 
- internal SemanticDebugInfo(QueryResultDocumentSemanticField titleField, IReadOnlyList contentFields, IReadOnlyList keywordFields, QueryResultDocumentRerankerInput rerankerInput) - { - TitleField = titleField; - ContentFields = contentFields; - KeywordFields = keywordFields; - RerankerInput = rerankerInput; - } - - /// The title field that was sent to the semantic enrichment process, as well as how it was used. - public QueryResultDocumentSemanticField TitleField { get; } - /// The content fields that were sent to the semantic enrichment process, as well as how they were used. - public IReadOnlyList ContentFields { get; } - /// The keyword fields that were sent to the semantic enrichment process, as well as how they were used. - public IReadOnlyList KeywordFields { get; } - /// The raw concatenated strings that were sent to the semantic enrichment process. - public QueryResultDocumentRerankerInput RerankerInput { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticField.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticField.Serialization.cs deleted file mode 100644 index be9118e13fca..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticField.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SemanticField : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("fieldName"u8); - writer.WriteStringValue(FieldName); - writer.WriteEndObject(); - } - - internal static SemanticField DeserializeSemanticField(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string fieldName = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("fieldName"u8)) - { - fieldName = property.Value.GetString(); - continue; - } - } - return new SemanticField(fieldName); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SemanticField FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSemanticField(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticField.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticField.cs deleted file mode 100644 index 4950fffd26d8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticField.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A field that is used as part of the semantic configuration. - public partial class SemanticField - { - /// Initializes a new instance of . - /// - /// is null. 
- public SemanticField(string fieldName) - { - Argument.AssertNotNull(fieldName, nameof(fieldName)); - - FieldName = fieldName; - } - - /// Gets or sets the field name. - public string FieldName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticPrioritizedFields.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticPrioritizedFields.Serialization.cs deleted file mode 100644 index 6c0b880a94bf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticPrioritizedFields.Serialization.cs +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SemanticPrioritizedFields : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(TitleField)) - { - writer.WritePropertyName("titleField"u8); - writer.WriteObjectValue(TitleField); - } - if (Optional.IsCollectionDefined(ContentFields)) - { - writer.WritePropertyName("prioritizedContentFields"u8); - writer.WriteStartArray(); - foreach (var item in ContentFields) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(KeywordsFields)) - { - writer.WritePropertyName("prioritizedKeywordsFields"u8); - writer.WriteStartArray(); - foreach (var item in KeywordsFields) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static SemanticPrioritizedFields DeserializeSemanticPrioritizedFields(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SemanticField titleField = default; - IList prioritizedContentFields = default; - IList prioritizedKeywordsFields = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("titleField"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - titleField = SemanticField.DeserializeSemanticField(property.Value); - continue; - } - if (property.NameEquals("prioritizedContentFields"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SemanticField.DeserializeSemanticField(item)); - } - prioritizedContentFields = array; - continue; - } - if (property.NameEquals("prioritizedKeywordsFields"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SemanticField.DeserializeSemanticField(item)); - } - prioritizedKeywordsFields = array; - continue; - } - } - return new SemanticPrioritizedFields(titleField, prioritizedContentFields ?? new ChangeTrackingList(), prioritizedKeywordsFields ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SemanticPrioritizedFields FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSemanticPrioritizedFields(document.RootElement); - } - - /// Convert into a . 
- internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticPrioritizedFields.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticPrioritizedFields.cs deleted file mode 100644 index a45fa8dfdda5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticPrioritizedFields.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Describes the title, content, and keywords fields to be used for semantic ranking, captions, highlights, and answers. - public partial class SemanticPrioritizedFields - { - /// Initializes a new instance of . - public SemanticPrioritizedFields() - { - ContentFields = new ChangeTrackingList(); - KeywordsFields = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// Defines the title field to be used for semantic ranking, captions, highlights, and answers. If you don't have a title field in your index, leave this blank. - /// Defines the content fields to be used for semantic ranking, captions, highlights, and answers. For the best result, the selected fields should contain text in natural language form. The order of the fields in the array represents their priority. Fields with lower priority may get truncated if the content is long. - /// Defines the keyword fields to be used for semantic ranking, captions, highlights, and answers. For the best result, the selected fields should contain a list of keywords. The order of the fields in the array represents their priority. Fields with lower priority may get truncated if the content is long. - internal SemanticPrioritizedFields(SemanticField titleField, IList contentFields, IList keywordsFields) - { - TitleField = titleField; - ContentFields = contentFields; - KeywordsFields = keywordsFields; - } - - /// Defines the title field to be used for semantic ranking, captions, highlights, and answers. If you don't have a title field in your index, leave this blank. - public SemanticField TitleField { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearch.Serialization.cs deleted file mode 100644 index b77c7e5cff99..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearch.Serialization.cs +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SemanticSearch : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultConfigurationName)) - { - writer.WritePropertyName("defaultConfiguration"u8); - writer.WriteStringValue(DefaultConfigurationName); - } - if (Optional.IsCollectionDefined(Configurations)) - { - writer.WritePropertyName("configurations"u8); - writer.WriteStartArray(); - foreach (var item in Configurations) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static SemanticSearch DeserializeSemanticSearch(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string defaultConfiguration = default; - IList configurations = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultConfiguration"u8)) - { - defaultConfiguration = property.Value.GetString(); - continue; - } - if (property.NameEquals("configurations"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SemanticConfiguration.DeserializeSemanticConfiguration(item)); - } - configurations = array; - continue; - } - } - return new SemanticSearch(defaultConfiguration, configurations ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SemanticSearch FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSemanticSearch(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearch.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearch.cs deleted file mode 100644 index 4f47cb113168..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearch.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines parameters for a search index that influence semantic capabilities. - public partial class SemanticSearch - { - /// Initializes a new instance of . - public SemanticSearch() - { - Configurations = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// Allows you to set the name of a default semantic configuration in your index, making it optional to pass it on as a query parameter every time. - /// The semantic configurations for the index. - internal SemanticSearch(string defaultConfigurationName, IList configurations) - { - DefaultConfigurationName = defaultConfigurationName; - Configurations = configurations; - } - - /// Allows you to set the name of a default semantic configuration in your index, making it optional to pass it on as a query parameter every time. 
- public string DefaultConfigurationName { get; set; } - /// The semantic configurations for the index. - public IList Configurations { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkill.Serialization.cs deleted file mode 100644 index df808f0b4452..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkill.Serialization.cs +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SentimentSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static SentimentSkill DeserializeSentimentSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SentimentSkillLanguage? 
defaultLanguageCode = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = new SentimentSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new SentimentSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SentimentSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSentimentSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkill.cs deleted file mode 100644 index 7e4f18241ddd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkill.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// This skill is deprecated. Use the V3.SentimentSkill instead. - public partial class SentimentSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. 
- /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - internal SentimentSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, SentimentSkillLanguage? defaultLanguageCode) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - ODataType = oDataType ?? "#Microsoft.Skills.Text.SentimentSkill"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillV3.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillV3.Serialization.cs deleted file mode 100644 index f45dad193ff2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillV3.Serialization.cs +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal partial class SentimentSkillV3 : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(IncludeOpinionMining)) - { - writer.WritePropertyName("includeOpinionMining"u8); - writer.WriteBooleanValue(IncludeOpinionMining.Value); - } - if (Optional.IsDefined(ModelVersion)) - { - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static SentimentSkillV3 DeserializeSentimentSkillV3(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string defaultLanguageCode = default; - bool? 
includeOpinionMining = default; - string modelVersion = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = property.Value.GetString(); - continue; - } - if (property.NameEquals("includeOpinionMining"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - includeOpinionMining = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new SentimentSkillV3( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - includeOpinionMining, - modelVersion); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SentimentSkillV3 FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSentimentSkillV3(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillV3.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillV3.cs deleted file mode 100644 index 805858c97bad..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillV3.cs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Using the Text Analytics API, evaluates unstructured text and for each record, provides sentiment labels (such as "negative", "neutral" and "positive") based on the highest confidence score found by the service at a sentence and document-level. - internal partial class SentimentSkillV3 : SearchIndexerSkill - { - /// Initializes a new instance of . 
- /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public SentimentSkillV3(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Text.V3.SentimentSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// If set to true, the skill output will include information from Text Analytics for opinion mining, namely targets (nouns or verbs) and their associated assessment (adjective) in the text. Default is false. - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - internal SentimentSkillV3(string oDataType, string name, string description, string context, IList inputs, IList outputs, string defaultLanguageCode, bool? includeOpinionMining, string modelVersion) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - IncludeOpinionMining = includeOpinionMining; - ModelVersion = modelVersion; - ODataType = oDataType ?? "#Microsoft.Skills.Text.V3.SentimentSkill"; - } - - /// A value indicating which language code to use. Default is `en`. - public string DefaultLanguageCode { get; set; } - /// If set to true, the skill output will include information from Text Analytics for opinion mining, namely targets (nouns or verbs) and their associated assessment (adjective) in the text. Default is false. - public bool? IncludeOpinionMining { get; set; } - /// The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. - public string ModelVersion { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShaperSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ShaperSkill.Serialization.cs deleted file mode 100644 index eab94707aeab..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShaperSkill.Serialization.cs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ShaperSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static ShaperSkill DeserializeShaperSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new ShaperSkill( - odataType, - name, - description, - context, - inputs, - outputs); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new ShaperSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeShaperSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShaperSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ShaperSkill.cs deleted file mode 100644 index ae558e215234..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShaperSkill.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill for reshaping the outputs. It creates a complex type to support composite fields (also known as multipart fields). - public partial class ShaperSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public ShaperSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Util.ShaperSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - internal ShaperSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs) : base(oDataType, name, description, context, inputs, outputs) - { - ODataType = oDataType ?? "#Microsoft.Skills.Util.ShaperSkill"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShingleTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ShingleTokenFilter.Serialization.cs deleted file mode 100644 index e61631618146..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShingleTokenFilter.Serialization.cs +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class ShingleTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxShingleSize)) - { - writer.WritePropertyName("maxShingleSize"u8); - writer.WriteNumberValue(MaxShingleSize.Value); - } - if (Optional.IsDefined(MinShingleSize)) - { - writer.WritePropertyName("minShingleSize"u8); - writer.WriteNumberValue(MinShingleSize.Value); - } - if (Optional.IsDefined(OutputUnigrams)) - { - writer.WritePropertyName("outputUnigrams"u8); - writer.WriteBooleanValue(OutputUnigrams.Value); - } - if (Optional.IsDefined(OutputUnigramsIfNoShingles)) - { - writer.WritePropertyName("outputUnigramsIfNoShingles"u8); - writer.WriteBooleanValue(OutputUnigramsIfNoShingles.Value); - } - if (Optional.IsDefined(TokenSeparator)) - { - writer.WritePropertyName("tokenSeparator"u8); - writer.WriteStringValue(TokenSeparator); - } - if (Optional.IsDefined(FilterToken)) - { - writer.WritePropertyName("filterToken"u8); - writer.WriteStringValue(FilterToken); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static ShingleTokenFilter DeserializeShingleTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxShingleSize = default; - int? minShingleSize = default; - bool? outputUnigrams = default; - bool? outputUnigramsIfNoShingles = default; - string tokenSeparator = default; - string filterToken = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxShingleSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxShingleSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("minShingleSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - minShingleSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("outputUnigrams"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - outputUnigrams = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("outputUnigramsIfNoShingles"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - outputUnigramsIfNoShingles = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("tokenSeparator"u8)) - { - tokenSeparator = property.Value.GetString(); - continue; - } - if (property.NameEquals("filterToken"u8)) - { - filterToken = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new ShingleTokenFilter( - odataType, - name, - maxShingleSize, - minShingleSize, - outputUnigrams, - outputUnigramsIfNoShingles, - tokenSeparator, - filterToken); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static new ShingleTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeShingleTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShingleTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ShingleTokenFilter.cs deleted file mode 100644 index 77b9247439dd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ShingleTokenFilter.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Creates combinations of tokens as a single token. This token filter is implemented using Apache Lucene. - public partial class ShingleTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public ShingleTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.ShingleTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The maximum shingle size. Default and minimum value is 2. - /// The minimum shingle size. Default and minimum value is 2. Must be less than the value of maxShingleSize. - /// A value indicating whether the output stream will contain the input tokens (unigrams) as well as shingles. Default is true. - /// A value indicating whether to output unigrams for those times when no shingles are available. This property takes precedence when outputUnigrams is set to false. Default is false. - /// The string to use when joining adjacent tokens to form a shingle. Default is a single space (" "). - /// The string to insert for each position at which there is no token. Default is an underscore ("_"). - internal ShingleTokenFilter(string oDataType, string name, int? maxShingleSize, int? minShingleSize, bool? outputUnigrams, bool? outputUnigramsIfNoShingles, string tokenSeparator, string filterToken) : base(oDataType, name) - { - MaxShingleSize = maxShingleSize; - MinShingleSize = minShingleSize; - OutputUnigrams = outputUnigrams; - OutputUnigramsIfNoShingles = outputUnigramsIfNoShingles; - TokenSeparator = tokenSeparator; - FilterToken = filterToken; - ODataType = oDataType ?? "#Microsoft.Azure.Search.ShingleTokenFilter"; - } - - /// The maximum shingle size. Default and minimum value is 2. - public int? MaxShingleSize { get; set; } - /// The minimum shingle size. Default and minimum value is 2. Must be less than the value of maxShingleSize. - public int? MinShingleSize { get; set; } - /// A value indicating whether the output stream will contain the input tokens (unigrams) as well as shingles. Default is true. - public bool? 
OutputUnigrams { get; set; } - /// A value indicating whether to output unigrams for those times when no shingles are available. This property takes precedence when outputUnigrams is set to false. Default is false. - public bool? OutputUnigramsIfNoShingles { get; set; } - /// The string to use when joining adjacent tokens to form a shingle. Default is a single space (" "). - public string TokenSeparator { get; set; } - /// The string to insert for each position at which there is no token. Default is an underscore ("_"). - public string FilterToken { get; set; } - } -}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SimilarityAlgorithm.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SimilarityAlgorithm.Serialization.cs deleted file mode 100644 index e03a07a904ab..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SimilarityAlgorithm.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SimilarityAlgorithm : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static SimilarityAlgorithm DeserializeSimilarityAlgorithm(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.BM25Similarity": return BM25Similarity.DeserializeBM25Similarity(element); - case "#Microsoft.Azure.Search.ClassicSimilarity": return ClassicSimilarity.DeserializeClassicSimilarity(element); - } - } - return UnknownSimilarity.DeserializeUnknownSimilarity(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SimilarityAlgorithm FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSimilarityAlgorithm(document.RootElement); - } - - /// Convert into a RequestContent. - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SimilarityAlgorithm.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SimilarityAlgorithm.cs deleted file mode 100644 index 3531bc270017..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SimilarityAlgorithm.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for similarity algorithms. Similarity algorithms are used to calculate scores that tie queries to documents. The higher the score, the more relevant the document is to that specific query. Those scores are used to rank the search results. - /// Please note SimilarityAlgorithm is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be cast to one of the possible derived classes. - /// The available derived classes include BM25Similarity and ClassicSimilarity. - /// - public partial class SimilarityAlgorithm - { - /// Initializes a new instance of SimilarityAlgorithm. - /// - internal SimilarityAlgorithm(string oDataType) - { - ODataType = oDataType; - } - - /// Gets or sets the OData type. - internal string ODataType { get; set; } - } -}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SingleVectorFieldResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SingleVectorFieldResult.Serialization.cs deleted file mode 100644 index 9f683d2227c5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SingleVectorFieldResult.Serialization.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class SingleVectorFieldResult - { - internal static SingleVectorFieldResult DeserializeSingleVectorFieldResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double? searchScore = default; - double? vectorSimilarity = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("searchScore"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchScore = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("vectorSimilarity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - vectorSimilarity = property.Value.GetDouble(); - continue; - } - } - return new SingleVectorFieldResult(searchScore, vectorSimilarity); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SingleVectorFieldResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSingleVectorFieldResult(document.RootElement); - } - } -}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SingleVectorFieldResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SingleVectorFieldResult.cs deleted file mode 100644 index faf48b297de8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SingleVectorFieldResult.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// A single vector field result. Both @search.score and vector similarity values are returned. Vector similarity is related to @search.score by an equation. - public partial class SingleVectorFieldResult - { - /// Initializes a new instance of SingleVectorFieldResult. - internal SingleVectorFieldResult() - { - } - - /// Initializes a new instance of SingleVectorFieldResult. - /// The @search.score value that is calculated from the vector similarity score. This is the score that's visible in a pure single-field single-vector query. - /// The vector similarity score for this document. Note this is the canonical definition of similarity metric, not the 'distance' version. For example, cosine similarity instead of cosine distance. - internal SingleVectorFieldResult(double? searchScore, double?
vectorSimilarity) - { - SearchScore = searchScore; - VectorSimilarity = vectorSimilarity; - } - - /// The @search.score value that is calculated from the vector similarity score. This is the score that's visible in a pure single-field single-vector query. - public double? SearchScore { get; } - /// The vector similarity score for this document. Note this is the canonical definition of similarity metric, not the 'distance' version. For example, cosine similarity instead of cosine distance. - public double? VectorSimilarity { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilter.Serialization.cs deleted file mode 100644 index 429feab6ea78..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilter.Serialization.cs +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SnowballTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("language"u8); - writer.WriteStringValue(Language.ToSerialString()); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static SnowballTokenFilter DeserializeSnowballTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SnowballTokenFilterLanguage language = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("language"u8)) - { - language = property.Value.GetString().ToSnowballTokenFilterLanguage(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new SnowballTokenFilter(odataType, name, language); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SnowballTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSnowballTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilter.cs deleted file mode 100644 index bf7e59a8780d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilter.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A filter that stems words using a Snowball-generated stemmer. This token filter is implemented using Apache Lucene. 
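A minimal sketch of constructing the SnowballTokenFilter whose serialization is shown above, using the (name, language) constructor in the model class that follows; the filter name is a placeholder.

var snowballFilter = new SnowballTokenFilter("my-snowball-filter", SnowballTokenFilterLanguage.English);
// The language is written to JSON via ToSerialString(), e.g. "english", alongside @odata.type and name.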
- public partial class SnowballTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The language to use. - /// is null. - public SnowballTokenFilter(string name, SnowballTokenFilterLanguage language) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Language = language; - ODataType = "#Microsoft.Azure.Search.SnowballTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The language to use. - internal SnowballTokenFilter(string oDataType, string name, SnowballTokenFilterLanguage language) : base(oDataType, name) - { - Language = language; - ODataType = oDataType ?? "#Microsoft.Azure.Search.SnowballTokenFilter"; - } - - /// The language to use. - public SnowballTokenFilterLanguage Language { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilterLanguage.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilterLanguage.Serialization.cs deleted file mode 100644 index 37f94538d225..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilterLanguage.Serialization.cs +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class SnowballTokenFilterLanguageExtensions - { - public static string ToSerialString(this SnowballTokenFilterLanguage value) => value switch - { - SnowballTokenFilterLanguage.Armenian => "armenian", - SnowballTokenFilterLanguage.Basque => "basque", - SnowballTokenFilterLanguage.Catalan => "catalan", - SnowballTokenFilterLanguage.Danish => "danish", - SnowballTokenFilterLanguage.Dutch => "dutch", - SnowballTokenFilterLanguage.English => "english", - SnowballTokenFilterLanguage.Finnish => "finnish", - SnowballTokenFilterLanguage.French => "french", - SnowballTokenFilterLanguage.German => "german", - SnowballTokenFilterLanguage.German2 => "german2", - SnowballTokenFilterLanguage.Hungarian => "hungarian", - SnowballTokenFilterLanguage.Italian => "italian", - SnowballTokenFilterLanguage.Kp => "kp", - SnowballTokenFilterLanguage.Lovins => "lovins", - SnowballTokenFilterLanguage.Norwegian => "norwegian", - SnowballTokenFilterLanguage.Porter => "porter", - SnowballTokenFilterLanguage.Portuguese => "portuguese", - SnowballTokenFilterLanguage.Romanian => "romanian", - SnowballTokenFilterLanguage.Russian => "russian", - SnowballTokenFilterLanguage.Spanish => "spanish", - SnowballTokenFilterLanguage.Swedish => "swedish", - SnowballTokenFilterLanguage.Turkish => "turkish", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown SnowballTokenFilterLanguage value.") - }; - - public static SnowballTokenFilterLanguage ToSnowballTokenFilterLanguage(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "armenian")) return SnowballTokenFilterLanguage.Armenian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "basque")) return 
SnowballTokenFilterLanguage.Basque; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "catalan")) return SnowballTokenFilterLanguage.Catalan; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "danish")) return SnowballTokenFilterLanguage.Danish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "dutch")) return SnowballTokenFilterLanguage.Dutch; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "english")) return SnowballTokenFilterLanguage.English; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "finnish")) return SnowballTokenFilterLanguage.Finnish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "french")) return SnowballTokenFilterLanguage.French; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "german")) return SnowballTokenFilterLanguage.German; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "german2")) return SnowballTokenFilterLanguage.German2; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hungarian")) return SnowballTokenFilterLanguage.Hungarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "italian")) return SnowballTokenFilterLanguage.Italian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "kp")) return SnowballTokenFilterLanguage.Kp; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lovins")) return SnowballTokenFilterLanguage.Lovins; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "norwegian")) return SnowballTokenFilterLanguage.Norwegian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "porter")) return SnowballTokenFilterLanguage.Porter; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portuguese")) return SnowballTokenFilterLanguage.Portuguese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "romanian")) return SnowballTokenFilterLanguage.Romanian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "russian")) return SnowballTokenFilterLanguage.Russian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "spanish")) return SnowballTokenFilterLanguage.Spanish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "swedish")) return SnowballTokenFilterLanguage.Swedish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "turkish")) return SnowballTokenFilterLanguage.Turkish; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown SnowballTokenFilterLanguage value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilterLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilterLanguage.cs deleted file mode 100644 index b57aef9bb5e9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SnowballTokenFilterLanguage.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The language to use for a Snowball token filter. - public enum SnowballTokenFilterLanguage - { - /// Selects the Lucene Snowball stemming tokenizer for Armenian. - Armenian, - /// Selects the Lucene Snowball stemming tokenizer for Basque. - Basque, - /// Selects the Lucene Snowball stemming tokenizer for Catalan. - Catalan, - /// Selects the Lucene Snowball stemming tokenizer for Danish. - Danish, - /// Selects the Lucene Snowball stemming tokenizer for Dutch. - Dutch, - /// Selects the Lucene Snowball stemming tokenizer for English. - English, - /// Selects the Lucene Snowball stemming tokenizer for Finnish. 
- Finnish, - /// Selects the Lucene Snowball stemming tokenizer for French. - French, - /// Selects the Lucene Snowball stemming tokenizer for German. - German, - /// Selects the Lucene Snowball stemming tokenizer that uses the German variant algorithm. - German2, - /// Selects the Lucene Snowball stemming tokenizer for Hungarian. - Hungarian, - /// Selects the Lucene Snowball stemming tokenizer for Italian. - Italian, - /// Selects the Lucene Snowball stemming tokenizer for Dutch that uses the Kraaij-Pohlmann stemming algorithm. - Kp, - /// Selects the Lucene Snowball stemming tokenizer for English that uses the Lovins stemming algorithm. - Lovins, - /// Selects the Lucene Snowball stemming tokenizer for Norwegian. - Norwegian, - /// Selects the Lucene Snowball stemming tokenizer for English that uses the Porter stemming algorithm. - Porter, - /// Selects the Lucene Snowball stemming tokenizer for Portuguese. - Portuguese, - /// Selects the Lucene Snowball stemming tokenizer for Romanian. - Romanian, - /// Selects the Lucene Snowball stemming tokenizer for Russian. - Russian, - /// Selects the Lucene Snowball stemming tokenizer for Spanish. - Spanish, - /// Selects the Lucene Snowball stemming tokenizer for Swedish. - Swedish, - /// Selects the Lucene Snowball stemming tokenizer for Turkish. - Turkish - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SoftDeleteColumnDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SoftDeleteColumnDeletionDetectionPolicy.Serialization.cs deleted file mode 100644 index 1e19e52ee2bf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SoftDeleteColumnDeletionDetectionPolicy.Serialization.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SoftDeleteColumnDeletionDetectionPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(SoftDeleteColumnName)) - { - writer.WritePropertyName("softDeleteColumnName"u8); - writer.WriteStringValue(SoftDeleteColumnName); - } - if (Optional.IsDefined(SoftDeleteMarkerValue)) - { - writer.WritePropertyName("softDeleteMarkerValue"u8); - writer.WriteStringValue(SoftDeleteMarkerValue); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static SoftDeleteColumnDeletionDetectionPolicy DeserializeSoftDeleteColumnDeletionDetectionPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string softDeleteColumnName = default; - string softDeleteMarkerValue = default; - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("softDeleteColumnName"u8)) - { - softDeleteColumnName = property.Value.GetString(); - continue; - } - if (property.NameEquals("softDeleteMarkerValue"u8)) - { - softDeleteMarkerValue = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new SoftDeleteColumnDeletionDetectionPolicy(odataType, softDeleteColumnName, softDeleteMarkerValue); - } - - /// Deserializes the model from a raw response. 
- /// The response to deserialize the model from. - internal static new SoftDeleteColumnDeletionDetectionPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSoftDeleteColumnDeletionDetectionPolicy(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SoftDeleteColumnDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SoftDeleteColumnDeletionDetectionPolicy.cs deleted file mode 100644 index 365c25349dd2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SoftDeleteColumnDeletionDetectionPolicy.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a data deletion detection policy that implements a soft-deletion strategy. It determines whether an item should be deleted based on the value of a designated 'soft delete' column. - public partial class SoftDeleteColumnDeletionDetectionPolicy : DataDeletionDetectionPolicy - { - /// Initializes a new instance of . - public SoftDeleteColumnDeletionDetectionPolicy() - { - ODataType = "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of data deletion detection policy. - /// The name of the column to use for soft-deletion detection. - /// The marker value that identifies an item as deleted. - internal SoftDeleteColumnDeletionDetectionPolicy(string oDataType, string softDeleteColumnName, string softDeleteMarkerValue) : base(oDataType) - { - SoftDeleteColumnName = softDeleteColumnName; - SoftDeleteMarkerValue = softDeleteMarkerValue; - ODataType = oDataType ?? "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy"; - } - - /// The name of the column to use for soft-deletion detection. - public string SoftDeleteColumnName { get; set; } - /// The marker value that identifies an item as deleted. - public string SoftDeleteMarkerValue { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkill.Serialization.cs deleted file mode 100644 index 94e24fbd5571..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkill.Serialization.cs +++ /dev/null @@ -1,293 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
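A minimal sketch of the SoftDeleteColumnDeletionDetectionPolicy shown above; the column name and marker value are placeholders, and assigning the policy to a data source's DataDeletionDetectionPolicy property is assumed from the wider indexer surface rather than shown in this diff.

var deletionPolicy = new SoftDeleteColumnDeletionDetectionPolicy
{
    SoftDeleteColumnName = "IsDeleted",
    SoftDeleteMarkerValue = "true"
};
// dataSource.DataDeletionDetectionPolicy = deletionPolicy;  // assumed assignment point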
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SplitSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(DefaultLanguageCode)) - { - if (DefaultLanguageCode != null) - { - writer.WritePropertyName("defaultLanguageCode"u8); - writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultLanguageCode"); - } - } - if (Optional.IsDefined(TextSplitMode)) - { - writer.WritePropertyName("textSplitMode"u8); - writer.WriteStringValue(TextSplitMode.Value.ToString()); - } - if (Optional.IsDefined(MaximumPageLength)) - { - if (MaximumPageLength != null) - { - writer.WritePropertyName("maximumPageLength"u8); - writer.WriteNumberValue(MaximumPageLength.Value); - } - else - { - writer.WriteNull("maximumPageLength"); - } - } - if (Optional.IsDefined(PageOverlapLength)) - { - if (PageOverlapLength != null) - { - writer.WritePropertyName("pageOverlapLength"u8); - writer.WriteNumberValue(PageOverlapLength.Value); - } - else - { - writer.WriteNull("pageOverlapLength"); - } - } - if (Optional.IsDefined(MaximumPagesToTake)) - { - if (MaximumPagesToTake != null) - { - writer.WritePropertyName("maximumPagesToTake"u8); - writer.WriteNumberValue(MaximumPagesToTake.Value); - } - else - { - writer.WriteNull("maximumPagesToTake"); - } - } - if (Optional.IsDefined(Unit)) - { - if (Unit != null) - { - writer.WritePropertyName("unit"u8); - writer.WriteStringValue(Unit.Value.ToString()); - } - else - { - writer.WriteNull("unit"); - } - } - if (Optional.IsDefined(AzureOpenAITokenizerParameters)) - { - if (AzureOpenAITokenizerParameters != null) - { - writer.WritePropertyName("azureOpenAITokenizerParameters"u8); - writer.WriteObjectValue(AzureOpenAITokenizerParameters); - } - else - { - writer.WriteNull("azureOpenAITokenizerParameters"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static SplitSkill DeserializeSplitSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - SplitSkillLanguage? defaultLanguageCode = default; - TextSplitMode? textSplitMode = default; - int? maximumPageLength = default; - int? pageOverlapLength = default; - int? maximumPagesToTake = default; - SplitSkillUnit? 
unit = default; - AzureOpenAITokenizerParameters azureOpenAITokenizerParameters = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultLanguageCode = null; - continue; - } - defaultLanguageCode = new SplitSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("textSplitMode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - textSplitMode = new TextSplitMode(property.Value.GetString()); - continue; - } - if (property.NameEquals("maximumPageLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maximumPageLength = null; - continue; - } - maximumPageLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("pageOverlapLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - pageOverlapLength = null; - continue; - } - pageOverlapLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("maximumPagesToTake"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - maximumPagesToTake = null; - continue; - } - maximumPagesToTake = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("unit"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - unit = null; - continue; - } - unit = new SplitSkillUnit(property.Value.GetString()); - continue; - } - if (property.NameEquals("azureOpenAITokenizerParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - azureOpenAITokenizerParameters = null; - continue; - } - azureOpenAITokenizerParameters = AzureOpenAITokenizerParameters.DeserializeAzureOpenAITokenizerParameters(property.Value); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new SplitSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultLanguageCode, - textSplitMode, - maximumPageLength, - pageOverlapLength, - maximumPagesToTake, - unit, - azureOpenAITokenizerParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SplitSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSplitSkill(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkill.cs deleted file mode 100644 index 3661a7ba2a74..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkill.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill to split a string into chunks of text. - public partial class SplitSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// or is null. - public SplitSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ODataType = "#Microsoft.Skills.Text.SplitSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A value indicating which language code to use. Default is `en`. - /// A value indicating which split mode to perform. - /// The desired maximum page length. Default is 10000. - /// Only applicable when textSplitMode is set to 'pages'. If specified, n+1th chunk will start with this number of characters/tokens from the end of the nth chunk. - /// Only applicable when textSplitMode is set to 'pages'. If specified, the SplitSkill will discontinue splitting after processing the first 'maximumPagesToTake' pages, in order to improve performance when only a few initial pages are needed from each document. - /// Only applies if textSplitMode is set to pages. There are two possible values. The choice of the values will decide the length (maximumPageLength and pageOverlapLength) measurement. The default is 'characters', which means the length will be measured by character. - /// Only applies if the unit is set to azureOpenAITokens. If specified, the splitSkill will use these parameters when performing the tokenization. The parameters are a valid 'encoderModelName' and an optional 'allowedSpecialTokens' property. - internal SplitSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, SplitSkillLanguage? defaultLanguageCode, TextSplitMode? textSplitMode, int? maximumPageLength, int? pageOverlapLength, int? 
maximumPagesToTake, SplitSkillUnit? unit, AzureOpenAITokenizerParameters azureOpenAITokenizerParameters) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultLanguageCode = defaultLanguageCode; - TextSplitMode = textSplitMode; - MaximumPageLength = maximumPageLength; - PageOverlapLength = pageOverlapLength; - MaximumPagesToTake = maximumPagesToTake; - Unit = unit; - AzureOpenAITokenizerParameters = azureOpenAITokenizerParameters; - ODataType = oDataType ?? "#Microsoft.Skills.Text.SplitSkill"; - } - - /// A value indicating which language code to use. Default is `en`. - public SplitSkillLanguage? DefaultLanguageCode { get; set; } - /// A value indicating which split mode to perform. - public TextSplitMode? TextSplitMode { get; set; } - /// The desired maximum page length. Default is 10000. - public int? MaximumPageLength { get; set; } - /// Only applicable when textSplitMode is set to 'pages'. If specified, n+1th chunk will start with this number of characters/tokens from the end of the nth chunk. - public int? PageOverlapLength { get; set; } - /// Only applicable when textSplitMode is set to 'pages'. If specified, the SplitSkill will discontinue splitting after processing the first 'maximumPagesToTake' pages, in order to improve performance when only a few initial pages are needed from each document. - public int? MaximumPagesToTake { get; set; } - /// Only applies if textSplitMode is set to pages. There are two possible values. The choice of the values will decide the length (maximumPageLength and pageOverlapLength) measurement. The default is 'characters', which means the length will be measured by character. - public SplitSkillUnit? Unit { get; set; } - /// Only applies if the unit is set to azureOpenAITokens. If specified, the splitSkill will use these parameters when performing the tokenization. The parameters are a valid 'encoderModelName' and an optional 'allowedSpecialTokens' property. - public AzureOpenAITokenizerParameters AzureOpenAITokenizerParameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillEncoderModelName.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillEncoderModelName.cs deleted file mode 100644 index 840f2cc5b6e8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillEncoderModelName.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A value indicating which tokenizer to use. - public readonly partial struct SplitSkillEncoderModelName : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public SplitSkillEncoderModelName(string value) - { - _value = value ?? throw new ArgumentNullException(nameof(value)); - } - - private const string R50KBaseValue = "r50k_base"; - private const string P50KBaseValue = "p50k_base"; - private const string P50KEditValue = "p50k_edit"; - private const string CL100KBaseValue = "cl100k_base"; - - /// Refers to a base model trained with a 50,000 token vocabulary, often used in general natural language processing tasks. - public static SplitSkillEncoderModelName R50KBase { get; } = new SplitSkillEncoderModelName(R50KBaseValue); - /// A base model with a 50,000 token vocabulary, optimized for prompt-based tasks. 
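A minimal sketch of the SplitSkill constructor and paging properties shown above; the InputFieldMappingEntry/OutputFieldMappingEntry shapes (name plus Source or TargetName) are assumed from the wider skillset surface, and the paths, names, and limits are placeholders.

var splitSkill = new SplitSkill(
    new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    new[] { new OutputFieldMappingEntry("textItems") { TargetName = "pages" } })
{
    TextSplitMode = TextSplitMode.Pages,
    MaximumPageLength = 2000,   // measured in characters by default
    PageOverlapLength = 200
};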
- public static SplitSkillEncoderModelName P50KBase { get; } = new SplitSkillEncoderModelName(P50KBaseValue); - /// Similar to p50k_base but fine-tuned for editing or rephrasing tasks with a 50,000 token vocabulary. - public static SplitSkillEncoderModelName P50KEdit { get; } = new SplitSkillEncoderModelName(P50KEditValue); - /// A base model with a 100,000 token vocabulary. - public static SplitSkillEncoderModelName CL100KBase { get; } = new SplitSkillEncoderModelName(CL100KBaseValue); - /// Determines if two values are the same. - public static bool operator ==(SplitSkillEncoderModelName left, SplitSkillEncoderModelName right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(SplitSkillEncoderModelName left, SplitSkillEncoderModelName right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator SplitSkillEncoderModelName(string value) => new SplitSkillEncoderModelName(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is SplitSkillEncoderModelName other && Equals(other); - /// - public bool Equals(SplitSkillEncoderModelName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SqlIntegratedChangeTrackingPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SqlIntegratedChangeTrackingPolicy.Serialization.cs deleted file mode 100644 index 22d6f9cbc5a6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SqlIntegratedChangeTrackingPolicy.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SqlIntegratedChangeTrackingPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static SqlIntegratedChangeTrackingPolicy DeserializeSqlIntegratedChangeTrackingPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new SqlIntegratedChangeTrackingPolicy(odataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SqlIntegratedChangeTrackingPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSqlIntegratedChangeTrackingPolicy(document.RootElement); - } - - /// Convert into a . 
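Continuing the splitSkill sketch above for token-based chunking; the SplitSkillUnit.AzureOpenAITokens value and the EncoderModelName property are assumptions inferred from the 'azureOpenAITokens' and 'encoderModelName' descriptions in the doc comments, not names confirmed by this diff.

splitSkill.Unit = SplitSkillUnit.AzureOpenAITokens;               // assumed member name
splitSkill.AzureOpenAITokenizerParameters = new AzureOpenAITokenizerParameters
{
    EncoderModelName = SplitSkillEncoderModelName.CL100KBase     // assumed property name
};
splitSkill.MaximumPageLength = 512;                               // now measured in tokens rather than characters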
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SqlIntegratedChangeTrackingPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SqlIntegratedChangeTrackingPolicy.cs deleted file mode 100644 index 211f7198894b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SqlIntegratedChangeTrackingPolicy.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a data change detection policy that captures changes using the Integrated Change Tracking feature of Azure SQL Database. - public partial class SqlIntegratedChangeTrackingPolicy : DataChangeDetectionPolicy - { - /// Initializes a new instance of . - public SqlIntegratedChangeTrackingPolicy() - { - ODataType = "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of data change detection policy. - internal SqlIntegratedChangeTrackingPolicy(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerOverrideTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerOverrideTokenFilter.Serialization.cs deleted file mode 100644 index 8c02af706bcb..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerOverrideTokenFilter.Serialization.cs +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class StemmerOverrideTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("rules"u8); - writer.WriteStartArray(); - foreach (var item in Rules) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static StemmerOverrideTokenFilter DeserializeStemmerOverrideTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList rules = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("rules"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - rules = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new StemmerOverrideTokenFilter(odataType, name, rules); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
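A minimal sketch of the parameterless SqlIntegratedChangeTrackingPolicy shown above; attaching it to a data source's DataChangeDetectionPolicy property is assumed from the wider indexer surface, and per the summary it relies on the Integrated Change Tracking feature of Azure SQL Database.

var changePolicy = new SqlIntegratedChangeTrackingPolicy();
// dataSource.DataChangeDetectionPolicy = changePolicy;  // assumed assignment point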
- internal static new StemmerOverrideTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeStemmerOverrideTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerOverrideTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerOverrideTokenFilter.cs deleted file mode 100644 index 0422ba0332f7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerOverrideTokenFilter.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Provides the ability to override other stemming filters with custom dictionary-based stemming. Any dictionary-stemmed terms will be marked as keywords so that they will not be stemmed with stemmers down the chain. Must be placed before any stemming filters. This token filter is implemented using Apache Lucene. - public partial class StemmerOverrideTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of stemming rules in the following format: "word => stem", for example: "ran => run". - /// or is null. - public StemmerOverrideTokenFilter(string name, IEnumerable rules) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(rules, nameof(rules)); - - Rules = rules.ToList(); - ODataType = "#Microsoft.Azure.Search.StemmerOverrideTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of stemming rules in the following format: "word => stem", for example: "ran => run". - internal StemmerOverrideTokenFilter(string oDataType, string name, IList rules) : base(oDataType, name) - { - Rules = rules; - ODataType = oDataType ?? "#Microsoft.Azure.Search.StemmerOverrideTokenFilter"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilter.Serialization.cs deleted file mode 100644 index 72d557901f4c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilter.Serialization.cs +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
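A minimal sketch of the StemmerOverrideTokenFilter shown above, using the "word => stem" rule format from its doc comments; the filter name and the second rule are placeholders. Per the summary, the filter must be placed before any stemming filters in the analyzer's filter chain.

var stemmerOverride = new StemmerOverrideTokenFilter(
    "my-stemmer-override",
    new[] { "ran => run", "mice => mouse" });   // rules follow the "word => stem" format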
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class StemmerTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("language"u8); - writer.WriteStringValue(Language.ToSerialString()); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static StemmerTokenFilter DeserializeStemmerTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - StemmerTokenFilterLanguage language = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("language"u8)) - { - language = property.Value.GetString().ToStemmerTokenFilterLanguage(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new StemmerTokenFilter(odataType, name, language); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new StemmerTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeStemmerTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilter.cs deleted file mode 100644 index 4115bd3d5f9f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilter.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Language specific stemming filter. This token filter is implemented using Apache Lucene. - public partial class StemmerTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The language to use. - /// is null. - public StemmerTokenFilter(string name, StemmerTokenFilterLanguage language) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Language = language; - ODataType = "#Microsoft.Azure.Search.StemmerTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The language to use. - internal StemmerTokenFilter(string oDataType, string name, StemmerTokenFilterLanguage language) : base(oDataType, name) - { - Language = language; - ODataType = oDataType ?? 
"#Microsoft.Azure.Search.StemmerTokenFilter"; - } - - /// The language to use. - public StemmerTokenFilterLanguage Language { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilterLanguage.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilterLanguage.Serialization.cs deleted file mode 100644 index fd41b82b8fbf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilterLanguage.Serialization.cs +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class StemmerTokenFilterLanguageExtensions - { - public static string ToSerialString(this StemmerTokenFilterLanguage value) => value switch - { - StemmerTokenFilterLanguage.Arabic => "arabic", - StemmerTokenFilterLanguage.Armenian => "armenian", - StemmerTokenFilterLanguage.Basque => "basque", - StemmerTokenFilterLanguage.Brazilian => "brazilian", - StemmerTokenFilterLanguage.Bulgarian => "bulgarian", - StemmerTokenFilterLanguage.Catalan => "catalan", - StemmerTokenFilterLanguage.Czech => "czech", - StemmerTokenFilterLanguage.Danish => "danish", - StemmerTokenFilterLanguage.Dutch => "dutch", - StemmerTokenFilterLanguage.DutchKp => "dutchKp", - StemmerTokenFilterLanguage.English => "english", - StemmerTokenFilterLanguage.LightEnglish => "lightEnglish", - StemmerTokenFilterLanguage.MinimalEnglish => "minimalEnglish", - StemmerTokenFilterLanguage.PossessiveEnglish => "possessiveEnglish", - StemmerTokenFilterLanguage.Porter2 => "porter2", - StemmerTokenFilterLanguage.Lovins => "lovins", - StemmerTokenFilterLanguage.Finnish => "finnish", - StemmerTokenFilterLanguage.LightFinnish => "lightFinnish", - StemmerTokenFilterLanguage.French => "french", - StemmerTokenFilterLanguage.LightFrench => "lightFrench", - StemmerTokenFilterLanguage.MinimalFrench => "minimalFrench", - StemmerTokenFilterLanguage.Galician => "galician", - StemmerTokenFilterLanguage.MinimalGalician => "minimalGalician", - StemmerTokenFilterLanguage.German => "german", - StemmerTokenFilterLanguage.German2 => "german2", - StemmerTokenFilterLanguage.LightGerman => "lightGerman", - StemmerTokenFilterLanguage.MinimalGerman => "minimalGerman", - StemmerTokenFilterLanguage.Greek => "greek", - StemmerTokenFilterLanguage.Hindi => "hindi", - StemmerTokenFilterLanguage.Hungarian => "hungarian", - StemmerTokenFilterLanguage.LightHungarian => "lightHungarian", - StemmerTokenFilterLanguage.Indonesian => "indonesian", - StemmerTokenFilterLanguage.Irish => "irish", - StemmerTokenFilterLanguage.Italian => "italian", - StemmerTokenFilterLanguage.LightItalian => "lightItalian", - StemmerTokenFilterLanguage.Sorani => "sorani", - StemmerTokenFilterLanguage.Latvian => "latvian", - StemmerTokenFilterLanguage.Norwegian => "norwegian", - StemmerTokenFilterLanguage.LightNorwegian => "lightNorwegian", - StemmerTokenFilterLanguage.MinimalNorwegian => "minimalNorwegian", - StemmerTokenFilterLanguage.LightNynorsk => "lightNynorsk", - StemmerTokenFilterLanguage.MinimalNynorsk => "minimalNynorsk", - StemmerTokenFilterLanguage.Portuguese => "portuguese", - StemmerTokenFilterLanguage.LightPortuguese => "lightPortuguese", - StemmerTokenFilterLanguage.MinimalPortuguese => "minimalPortuguese", - StemmerTokenFilterLanguage.PortugueseRslp => "portugueseRslp", - StemmerTokenFilterLanguage.Romanian => 
"romanian", - StemmerTokenFilterLanguage.Russian => "russian", - StemmerTokenFilterLanguage.LightRussian => "lightRussian", - StemmerTokenFilterLanguage.Spanish => "spanish", - StemmerTokenFilterLanguage.LightSpanish => "lightSpanish", - StemmerTokenFilterLanguage.Swedish => "swedish", - StemmerTokenFilterLanguage.LightSwedish => "lightSwedish", - StemmerTokenFilterLanguage.Turkish => "turkish", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown StemmerTokenFilterLanguage value.") - }; - - public static StemmerTokenFilterLanguage ToStemmerTokenFilterLanguage(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "arabic")) return StemmerTokenFilterLanguage.Arabic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "armenian")) return StemmerTokenFilterLanguage.Armenian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "basque")) return StemmerTokenFilterLanguage.Basque; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "brazilian")) return StemmerTokenFilterLanguage.Brazilian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "bulgarian")) return StemmerTokenFilterLanguage.Bulgarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "catalan")) return StemmerTokenFilterLanguage.Catalan; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "czech")) return StemmerTokenFilterLanguage.Czech; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "danish")) return StemmerTokenFilterLanguage.Danish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "dutch")) return StemmerTokenFilterLanguage.Dutch; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "dutchKp")) return StemmerTokenFilterLanguage.DutchKp; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "english")) return StemmerTokenFilterLanguage.English; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightEnglish")) return StemmerTokenFilterLanguage.LightEnglish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "minimalEnglish")) return StemmerTokenFilterLanguage.MinimalEnglish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "possessiveEnglish")) return StemmerTokenFilterLanguage.PossessiveEnglish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "porter2")) return StemmerTokenFilterLanguage.Porter2; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lovins")) return StemmerTokenFilterLanguage.Lovins; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "finnish")) return StemmerTokenFilterLanguage.Finnish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightFinnish")) return StemmerTokenFilterLanguage.LightFinnish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "french")) return StemmerTokenFilterLanguage.French; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightFrench")) return StemmerTokenFilterLanguage.LightFrench; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "minimalFrench")) return StemmerTokenFilterLanguage.MinimalFrench; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "galician")) return StemmerTokenFilterLanguage.Galician; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "minimalGalician")) return StemmerTokenFilterLanguage.MinimalGalician; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "german")) return StemmerTokenFilterLanguage.German; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "german2")) return StemmerTokenFilterLanguage.German2; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightGerman")) return StemmerTokenFilterLanguage.LightGerman; - if 
(StringComparer.OrdinalIgnoreCase.Equals(value, "minimalGerman")) return StemmerTokenFilterLanguage.MinimalGerman; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "greek")) return StemmerTokenFilterLanguage.Greek; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hindi")) return StemmerTokenFilterLanguage.Hindi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hungarian")) return StemmerTokenFilterLanguage.Hungarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightHungarian")) return StemmerTokenFilterLanguage.LightHungarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "indonesian")) return StemmerTokenFilterLanguage.Indonesian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "irish")) return StemmerTokenFilterLanguage.Irish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "italian")) return StemmerTokenFilterLanguage.Italian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightItalian")) return StemmerTokenFilterLanguage.LightItalian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "sorani")) return StemmerTokenFilterLanguage.Sorani; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "latvian")) return StemmerTokenFilterLanguage.Latvian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "norwegian")) return StemmerTokenFilterLanguage.Norwegian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightNorwegian")) return StemmerTokenFilterLanguage.LightNorwegian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "minimalNorwegian")) return StemmerTokenFilterLanguage.MinimalNorwegian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightNynorsk")) return StemmerTokenFilterLanguage.LightNynorsk; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "minimalNynorsk")) return StemmerTokenFilterLanguage.MinimalNynorsk; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portuguese")) return StemmerTokenFilterLanguage.Portuguese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightPortuguese")) return StemmerTokenFilterLanguage.LightPortuguese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "minimalPortuguese")) return StemmerTokenFilterLanguage.MinimalPortuguese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portugueseRslp")) return StemmerTokenFilterLanguage.PortugueseRslp; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "romanian")) return StemmerTokenFilterLanguage.Romanian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "russian")) return StemmerTokenFilterLanguage.Russian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightRussian")) return StemmerTokenFilterLanguage.LightRussian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "spanish")) return StemmerTokenFilterLanguage.Spanish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightSpanish")) return StemmerTokenFilterLanguage.LightSpanish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "swedish")) return StemmerTokenFilterLanguage.Swedish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "lightSwedish")) return StemmerTokenFilterLanguage.LightSwedish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "turkish")) return StemmerTokenFilterLanguage.Turkish; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown StemmerTokenFilterLanguage value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilterLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilterLanguage.cs deleted file mode 100644 index 
4463bf335721..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StemmerTokenFilterLanguage.cs +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The language to use for a stemmer token filter. - public enum StemmerTokenFilterLanguage - { - /// Selects the Lucene stemming tokenizer for Arabic. - Arabic, - /// Selects the Lucene stemming tokenizer for Armenian. - Armenian, - /// Selects the Lucene stemming tokenizer for Basque. - Basque, - /// Selects the Lucene stemming tokenizer for Portuguese (Brazil). - Brazilian, - /// Selects the Lucene stemming tokenizer for Bulgarian. - Bulgarian, - /// Selects the Lucene stemming tokenizer for Catalan. - Catalan, - /// Selects the Lucene stemming tokenizer for Czech. - Czech, - /// Selects the Lucene stemming tokenizer for Danish. - Danish, - /// Selects the Lucene stemming tokenizer for Dutch. - Dutch, - /// Selects the Lucene stemming tokenizer for Dutch that uses the Kraaij-Pohlmann stemming algorithm. - DutchKp, - /// Selects the Lucene stemming tokenizer for English. - English, - /// Selects the Lucene stemming tokenizer for English that does light stemming. - LightEnglish, - /// Selects the Lucene stemming tokenizer for English that does minimal stemming. - MinimalEnglish, - /// Selects the Lucene stemming tokenizer for English that removes trailing possessives from words. - PossessiveEnglish, - /// Selects the Lucene stemming tokenizer for English that uses the Porter2 stemming algorithm. - Porter2, - /// Selects the Lucene stemming tokenizer for English that uses the Lovins stemming algorithm. - Lovins, - /// Selects the Lucene stemming tokenizer for Finnish. - Finnish, - /// Selects the Lucene stemming tokenizer for Finnish that does light stemming. - LightFinnish, - /// Selects the Lucene stemming tokenizer for French. - French, - /// Selects the Lucene stemming tokenizer for French that does light stemming. - LightFrench, - /// Selects the Lucene stemming tokenizer for French that does minimal stemming. - MinimalFrench, - /// Selects the Lucene stemming tokenizer for Galician. - Galician, - /// Selects the Lucene stemming tokenizer for Galician that does minimal stemming. - MinimalGalician, - /// Selects the Lucene stemming tokenizer for German. - German, - /// Selects the Lucene stemming tokenizer that uses the German variant algorithm. - German2, - /// Selects the Lucene stemming tokenizer for German that does light stemming. - LightGerman, - /// Selects the Lucene stemming tokenizer for German that does minimal stemming. - MinimalGerman, - /// Selects the Lucene stemming tokenizer for Greek. - Greek, - /// Selects the Lucene stemming tokenizer for Hindi. - Hindi, - /// Selects the Lucene stemming tokenizer for Hungarian. - Hungarian, - /// Selects the Lucene stemming tokenizer for Hungarian that does light stemming. - LightHungarian, - /// Selects the Lucene stemming tokenizer for Indonesian. - Indonesian, - /// Selects the Lucene stemming tokenizer for Irish. - Irish, - /// Selects the Lucene stemming tokenizer for Italian. - Italian, - /// Selects the Lucene stemming tokenizer for Italian that does light stemming. - LightItalian, - /// Selects the Lucene stemming tokenizer for Sorani. - Sorani, - /// Selects the Lucene stemming tokenizer for Latvian. - Latvian, - /// Selects the Lucene stemming tokenizer for Norwegian (Bokmål). 
- Norwegian, - /// Selects the Lucene stemming tokenizer for Norwegian (Bokmål) that does light stemming. - LightNorwegian, - /// Selects the Lucene stemming tokenizer for Norwegian (Bokmål) that does minimal stemming. - MinimalNorwegian, - /// Selects the Lucene stemming tokenizer for Norwegian (Nynorsk) that does light stemming. - LightNynorsk, - /// Selects the Lucene stemming tokenizer for Norwegian (Nynorsk) that does minimal stemming. - MinimalNynorsk, - /// Selects the Lucene stemming tokenizer for Portuguese. - Portuguese, - /// Selects the Lucene stemming tokenizer for Portuguese that does light stemming. - LightPortuguese, - /// Selects the Lucene stemming tokenizer for Portuguese that does minimal stemming. - MinimalPortuguese, - /// Selects the Lucene stemming tokenizer for Portuguese that uses the RSLP stemming algorithm. - PortugueseRslp, - /// Selects the Lucene stemming tokenizer for Romanian. - Romanian, - /// Selects the Lucene stemming tokenizer for Russian. - Russian, - /// Selects the Lucene stemming tokenizer for Russian that does light stemming. - LightRussian, - /// Selects the Lucene stemming tokenizer for Spanish. - Spanish, - /// Selects the Lucene stemming tokenizer for Spanish that does light stemming. - LightSpanish, - /// Selects the Lucene stemming tokenizer for Swedish. - Swedish, - /// Selects the Lucene stemming tokenizer for Swedish that does light stemming. - LightSwedish, - /// Selects the Lucene stemming tokenizer for Turkish. - Turkish - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StopAnalyzer.Serialization.cs deleted file mode 100644 index 21bd8d0f6d9f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopAnalyzer.Serialization.cs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class StopAnalyzer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(Stopwords)) - { - writer.WritePropertyName("stopwords"u8); - writer.WriteStartArray(); - foreach (var item in Stopwords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static StopAnalyzer DeserializeStopAnalyzer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList stopwords = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("stopwords"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - stopwords = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new StopAnalyzer(odataType, name, stopwords ?? 
new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new StopAnalyzer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeStopAnalyzer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StopAnalyzer.cs deleted file mode 100644 index 5b8283254e8d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopAnalyzer.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Divides text at non-letters; Applies the lowercase and stopword token filters. This analyzer is implemented using Apache Lucene. - public partial class StopAnalyzer : LexicalAnalyzer - { - /// Initializes a new instance of . - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public StopAnalyzer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Stopwords = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.StopAnalyzer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of analyzer. - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of stopwords. - internal StopAnalyzer(string oDataType, string name, IList stopwords) : base(oDataType, name) - { - Stopwords = stopwords; - ODataType = oDataType ?? "#Microsoft.Azure.Search.StopAnalyzer"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsList.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsList.Serialization.cs deleted file mode 100644 index fb33087015cf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsList.Serialization.cs +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
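For context, a minimal illustrative sketch of the analyzer surface defined in the files deleted above. It uses only the StopAnalyzer(string name) constructor and the Stopwords collection visible in the deleted StopAnalyzer.cs; SearchIndex and its Analyzers collection are assumed from the broader Azure.Search.Documents.Indexes surface and are not part of this diff.

    using Azure.Search.Documents.Indexes.Models;

    // Illustrative only: analyzer names may contain letters, digits, spaces, dashes or
    // underscores, must start and end with an alphanumeric character, and are limited
    // to 128 characters (per the deleted doc comments above).
    var stopAnalyzer = new StopAnalyzer("demo-stop-analyzer");
    stopAnalyzer.Stopwords.Add("the");
    stopAnalyzer.Stopwords.Add("a");

    // Assumed from the existing Indexes surface (not shown in this diff):
    var index = new SearchIndex("demo-index");
    index.Analyzers.Add(stopAnalyzer);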
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class StopwordsListExtensions - { - public static string ToSerialString(this StopwordsList value) => value switch - { - StopwordsList.Arabic => "arabic", - StopwordsList.Armenian => "armenian", - StopwordsList.Basque => "basque", - StopwordsList.Brazilian => "brazilian", - StopwordsList.Bulgarian => "bulgarian", - StopwordsList.Catalan => "catalan", - StopwordsList.Czech => "czech", - StopwordsList.Danish => "danish", - StopwordsList.Dutch => "dutch", - StopwordsList.English => "english", - StopwordsList.Finnish => "finnish", - StopwordsList.French => "french", - StopwordsList.Galician => "galician", - StopwordsList.German => "german", - StopwordsList.Greek => "greek", - StopwordsList.Hindi => "hindi", - StopwordsList.Hungarian => "hungarian", - StopwordsList.Indonesian => "indonesian", - StopwordsList.Irish => "irish", - StopwordsList.Italian => "italian", - StopwordsList.Latvian => "latvian", - StopwordsList.Norwegian => "norwegian", - StopwordsList.Persian => "persian", - StopwordsList.Portuguese => "portuguese", - StopwordsList.Romanian => "romanian", - StopwordsList.Russian => "russian", - StopwordsList.Sorani => "sorani", - StopwordsList.Spanish => "spanish", - StopwordsList.Swedish => "swedish", - StopwordsList.Thai => "thai", - StopwordsList.Turkish => "turkish", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown StopwordsList value.") - }; - - public static StopwordsList ToStopwordsList(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "arabic")) return StopwordsList.Arabic; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "armenian")) return StopwordsList.Armenian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "basque")) return StopwordsList.Basque; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "brazilian")) return StopwordsList.Brazilian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "bulgarian")) return StopwordsList.Bulgarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "catalan")) return StopwordsList.Catalan; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "czech")) return StopwordsList.Czech; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "danish")) return StopwordsList.Danish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "dutch")) return StopwordsList.Dutch; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "english")) return StopwordsList.English; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "finnish")) return StopwordsList.Finnish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "french")) return StopwordsList.French; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "galician")) return StopwordsList.Galician; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "german")) return StopwordsList.German; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "greek")) return StopwordsList.Greek; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hindi")) return StopwordsList.Hindi; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "hungarian")) return StopwordsList.Hungarian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "indonesian")) return StopwordsList.Indonesian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "irish")) return StopwordsList.Irish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "italian")) return StopwordsList.Italian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "latvian")) return 
StopwordsList.Latvian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "norwegian")) return StopwordsList.Norwegian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "persian")) return StopwordsList.Persian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "portuguese")) return StopwordsList.Portuguese; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "romanian")) return StopwordsList.Romanian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "russian")) return StopwordsList.Russian; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "sorani")) return StopwordsList.Sorani; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "spanish")) return StopwordsList.Spanish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "swedish")) return StopwordsList.Swedish; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "thai")) return StopwordsList.Thai; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "turkish")) return StopwordsList.Turkish; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown StopwordsList value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsList.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsList.cs deleted file mode 100644 index 078d3b1d66ab..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsList.cs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Identifies a predefined list of language-specific stopwords. - public enum StopwordsList - { - /// Selects the stopword list for Arabic. - Arabic, - /// Selects the stopword list for Armenian. - Armenian, - /// Selects the stopword list for Basque. - Basque, - /// Selects the stopword list for Portuguese (Brazil). - Brazilian, - /// Selects the stopword list for Bulgarian. - Bulgarian, - /// Selects the stopword list for Catalan. - Catalan, - /// Selects the stopword list for Czech. - Czech, - /// Selects the stopword list for Danish. - Danish, - /// Selects the stopword list for Dutch. - Dutch, - /// Selects the stopword list for English. - English, - /// Selects the stopword list for Finnish. - Finnish, - /// Selects the stopword list for French. - French, - /// Selects the stopword list for Galician. - Galician, - /// Selects the stopword list for German. - German, - /// Selects the stopword list for Greek. - Greek, - /// Selects the stopword list for Hindi. - Hindi, - /// Selects the stopword list for Hungarian. - Hungarian, - /// Selects the stopword list for Indonesian. - Indonesian, - /// Selects the stopword list for Irish. - Irish, - /// Selects the stopword list for Italian. - Italian, - /// Selects the stopword list for Latvian. - Latvian, - /// Selects the stopword list for Norwegian. - Norwegian, - /// Selects the stopword list for Persian. - Persian, - /// Selects the stopword list for Portuguese. - Portuguese, - /// Selects the stopword list for Romanian. - Romanian, - /// Selects the stopword list for Russian. - Russian, - /// Selects the stopword list for Sorani. - Sorani, - /// Selects the stopword list for Spanish. - Spanish, - /// Selects the stopword list for Swedish. - Swedish, - /// Selects the stopword list for Thai. - Thai, - /// Selects the stopword list for Turkish. 
- Turkish - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsTokenFilter.Serialization.cs deleted file mode 100644 index 8860e97e8e8b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsTokenFilter.Serialization.cs +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class StopwordsTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(Stopwords)) - { - writer.WritePropertyName("stopwords"u8); - writer.WriteStartArray(); - foreach (var item in Stopwords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsDefined(StopwordsList)) - { - writer.WritePropertyName("stopwordsList"u8); - writer.WriteStringValue(StopwordsList.Value.ToSerialString()); - } - if (Optional.IsDefined(IgnoreCase)) - { - writer.WritePropertyName("ignoreCase"u8); - writer.WriteBooleanValue(IgnoreCase.Value); - } - if (Optional.IsDefined(RemoveTrailingStopWords)) - { - writer.WritePropertyName("removeTrailing"u8); - writer.WriteBooleanValue(RemoveTrailingStopWords.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static StopwordsTokenFilter DeserializeStopwordsTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList stopwords = default; - StopwordsList? stopwordsList = default; - bool? ignoreCase = default; - bool? removeTrailing = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("stopwords"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - stopwords = array; - continue; - } - if (property.NameEquals("stopwordsList"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - stopwordsList = property.Value.GetString().ToStopwordsList(); - continue; - } - if (property.NameEquals("ignoreCase"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - ignoreCase = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("removeTrailing"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - removeTrailing = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new StopwordsTokenFilter( - odataType, - name, - stopwords ?? new ChangeTrackingList(), - stopwordsList, - ignoreCase, - removeTrailing); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static new StopwordsTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeStopwordsTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsTokenFilter.cs deleted file mode 100644 index 8693c1506cf5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/StopwordsTokenFilter.cs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Removes stop words from a token stream. This token filter is implemented using Apache Lucene. - public partial class StopwordsTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public StopwordsTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - Stopwords = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.StopwordsTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The list of stopwords. This property and the stopwords list property cannot both be set. - /// A predefined list of stopwords to use. This property and the stopwords property cannot both be set. Default is English. - /// A value indicating whether to ignore case. If true, all words are converted to lower case first. Default is false. - /// A value indicating whether to ignore the last search term if it's a stop word. Default is true. - internal StopwordsTokenFilter(string oDataType, string name, IList stopwords, StopwordsList? stopwordsList, bool? ignoreCase, bool? removeTrailingStopWords) : base(oDataType, name) - { - Stopwords = stopwords; - StopwordsList = stopwordsList; - IgnoreCase = ignoreCase; - RemoveTrailingStopWords = removeTrailingStopWords; - ODataType = oDataType ?? "#Microsoft.Azure.Search.StopwordsTokenFilter"; - } - /// A predefined list of stopwords to use. This property and the stopwords property cannot both be set. Default is English. - public StopwordsList? StopwordsList { get; set; } - /// A value indicating whether to ignore case. If true, all words are converted to lower case first. Default is false. - public bool? IgnoreCase { get; set; } - /// A value indicating whether to ignore the last search term if it's a stop word. Default is true. - public bool? 
RemoveTrailingStopWords { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestDocumentsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestDocumentsResult.Serialization.cs deleted file mode 100644 index 85d2f559d9db..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestDocumentsResult.Serialization.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - internal partial class SuggestDocumentsResult - { - internal static SuggestDocumentsResult DeserializeSuggestDocumentsResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IReadOnlyList value = default; - double? searchCoverage = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(SuggestResult.DeserializeSuggestResult(item)); - } - value = array; - continue; - } - if (property.NameEquals("@search.coverage"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchCoverage = property.Value.GetDouble(); - continue; - } - } - return new SuggestDocumentsResult(value, searchCoverage); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SuggestDocumentsResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSuggestDocumentsResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestDocumentsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestDocumentsResult.cs deleted file mode 100644 index eb23565105f0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestDocumentsResult.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Models -{ - /// Response containing suggestion query results from an index. - internal partial class SuggestDocumentsResult - { - /// Initializes a new instance of . - /// The sequence of results returned by the query. - internal SuggestDocumentsResult(IEnumerable results) - { - Results = results.ToList(); - } - - /// Initializes a new instance of . - /// The sequence of results returned by the query. - /// A value indicating the percentage of the index that was included in the query, or null if minimumCoverage was not set in the request. - internal SuggestDocumentsResult(IReadOnlyList results, double? coverage) - { - Results = results; - Coverage = coverage; - } - - /// The sequence of results returned by the query. - public IReadOnlyList Results { get; } - /// A value indicating the percentage of the index that was included in the query, or null if minimumCoverage was not set in the request. - public double? 
Coverage { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestOptions.Serialization.cs deleted file mode 100644 index 719b7feb055c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestOptions.Serialization.cs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents -{ - public partial class SuggestOptions : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Filter)) - { - writer.WritePropertyName("filter"u8); - writer.WriteStringValue(Filter); - } - if (Optional.IsDefined(UseFuzzyMatching)) - { - writer.WritePropertyName("fuzzy"u8); - writer.WriteBooleanValue(UseFuzzyMatching.Value); - } - if (Optional.IsDefined(HighlightPostTag)) - { - writer.WritePropertyName("highlightPostTag"u8); - writer.WriteStringValue(HighlightPostTag); - } - if (Optional.IsDefined(HighlightPreTag)) - { - writer.WritePropertyName("highlightPreTag"u8); - writer.WriteStringValue(HighlightPreTag); - } - if (Optional.IsDefined(MinimumCoverage)) - { - writer.WritePropertyName("minimumCoverage"u8); - writer.WriteNumberValue(MinimumCoverage.Value); - } - if (Optional.IsDefined(OrderByRaw)) - { - writer.WritePropertyName("orderby"u8); - writer.WriteStringValue(OrderByRaw); - } - writer.WritePropertyName("search"u8); - writer.WriteStringValue(SearchText); - if (Optional.IsDefined(SearchFieldsRaw)) - { - writer.WritePropertyName("searchFields"u8); - writer.WriteStringValue(SearchFieldsRaw); - } - if (Optional.IsDefined(SelectRaw)) - { - writer.WritePropertyName("select"u8); - writer.WriteStringValue(SelectRaw); - } - writer.WritePropertyName("suggesterName"u8); - writer.WriteStringValue(SuggesterName); - if (Optional.IsDefined(Size)) - { - writer.WritePropertyName("top"u8); - writer.WriteNumberValue(Size.Value); - } - writer.WriteEndObject(); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestOptions.cs deleted file mode 100644 index e2c12b86e235..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestOptions.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents -{ - /// Parameters for filtering, sorting, fuzzy matching, and other suggestions query behaviors. - public partial class SuggestOptions - { - /// Initializes a new instance of . - /// An OData expression that filters the documents considered for suggestions. - /// A value indicating whether to use fuzzy matching for the suggestion query. Default is false. When set to true, the query will find suggestions even if there's a substituted or missing character in the search text. While this provides a better experience in some scenarios, it comes at a performance cost as fuzzy suggestion searches are slower and consume more resources. 
- /// A string tag that is appended to hit highlights. Must be set with highlightPreTag. If omitted, hit highlighting of suggestions is disabled. - /// A string tag that is prepended to hit highlights. Must be set with highlightPostTag. If omitted, hit highlighting of suggestions is disabled. - /// A number between 0 and 100 indicating the percentage of the index that must be covered by a suggestion query in order for the query to be reported as a success. This parameter can be useful for ensuring search availability even for services with only one replica. The default is 80. - /// The comma-separated list of OData $orderby expressions by which to sort the results. Each expression can be either a field name or a call to either the geo.distance() or the search.score() functions. Each expression can be followed by asc to indicate ascending, or desc to indicate descending. The default is ascending order. Ties will be broken by the match scores of documents. If no $orderby is specified, the default sort order is descending by document match score. There can be at most 32 $orderby clauses. - /// The search text to use to suggest documents. Must be at least 1 character, and no more than 100 characters. - /// The comma-separated list of field names to search for the specified search text. Target fields must be included in the specified suggester. - /// The comma-separated list of fields to retrieve. If unspecified, only the key field will be included in the results. - /// The name of the suggester as specified in the suggesters collection that's part of the index definition. - /// The number of suggestions to retrieve. This must be a value between 1 and 100. The default is 5. - internal SuggestOptions(string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, string orderByRaw, string searchText, string searchFieldsRaw, string selectRaw, string suggesterName, int? size) - { - Filter = filter; - UseFuzzyMatching = useFuzzyMatching; - HighlightPostTag = highlightPostTag; - HighlightPreTag = highlightPreTag; - MinimumCoverage = minimumCoverage; - OrderByRaw = orderByRaw; - SearchText = searchText; - SearchFieldsRaw = searchFieldsRaw; - SelectRaw = selectRaw; - SuggesterName = suggesterName; - Size = size; - } - /// A value indicating whether to use fuzzy matching for the suggestion query. Default is false. When set to true, the query will find suggestions even if there's a substituted or missing character in the search text. While this provides a better experience in some scenarios, it comes at a performance cost as fuzzy suggestion searches are slower and consume more resources. - public bool? UseFuzzyMatching { get; set; } - /// A string tag that is appended to hit highlights. Must be set with highlightPreTag. If omitted, hit highlighting of suggestions is disabled. - public string HighlightPostTag { get; set; } - /// A string tag that is prepended to hit highlights. Must be set with highlightPostTag. If omitted, hit highlighting of suggestions is disabled. - public string HighlightPreTag { get; set; } - /// A number between 0 and 100 indicating the percentage of the index that must be covered by a suggestion query in order for the query to be reported as a success. This parameter can be useful for ensuring search availability even for services with only one replica. The default is 80. - public double? 
MinimumCoverage { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestResult.Serialization.cs deleted file mode 100644 index f106b4154e29..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestResult.Serialization.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - internal partial class SuggestResult - { - internal static SuggestResult DeserializeSuggestResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string searchText = default; - IReadOnlyDictionary additionalProperties = default; - Dictionary additionalPropertiesDictionary = new Dictionary(); - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@search.text"u8)) - { - searchText = property.Value.GetString(); - continue; - } - additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject()); - } - additionalProperties = additionalPropertiesDictionary; - return new SuggestResult(searchText, additionalProperties); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SuggestResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSuggestResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestResult.cs deleted file mode 100644 index c4023257c183..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SuggestResult.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Models -{ - /// A result containing a document found by a suggestion query, plus associated metadata. - internal partial class SuggestResult - { - /// Initializes a new instance of . - /// The text of the suggestion result. - internal SuggestResult(string text) - { - Text = text; - AdditionalProperties = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// The text of the suggestion result. - /// Additional Properties. - internal SuggestResult(string text, IReadOnlyDictionary additionalProperties) - { - Text = text; - AdditionalProperties = additionalProperties; - } - - /// The text of the suggestion result. - public string Text { get; } - /// Additional Properties. - public IReadOnlyDictionary AdditionalProperties { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymMap.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymMap.Serialization.cs deleted file mode 100644 index 51f87de02fe1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymMap.Serialization.cs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
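For context, a minimal illustrative sketch of the suggestion options removed above. Only properties visible in the deleted SuggestOptions.cs are set; the parameterless constructor, the hypothetical Hotel document type, the "sg" suggester name, and the SearchClient.SuggestAsync overload are assumptions drawn from the hand-written public surface rather than from this diff.

    using Azure.Search.Documents;

    var options = new SuggestOptions                 // assumed public parameterless constructor
    {
        UseFuzzyMatching = true,                     // tolerates a substituted or missing character; slower
        HighlightPreTag = "<b>",                     // must be set together with HighlightPostTag
        HighlightPostTag = "</b>",
        MinimumCoverage = 80                         // percentage of the index that must be covered (default 80)
    };

    // Assumed client call (hypothetical Hotel type and "sg" suggester name):
    // Response<SuggestResults<Hotel>> response =
    //     await searchClient.SuggestAsync<Hotel>("sea", "sg", options);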
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SynonymMap : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("format"u8); - writer.WriteStringValue(Format); - writer.WritePropertyName("synonyms"u8); - writer.WriteStringValue(Synonyms); - if (Optional.IsDefined(EncryptionKey)) - { - if (EncryptionKey != null) - { - writer.WritePropertyName("encryptionKey"u8); - writer.WriteObjectValue(EncryptionKey); - } - else - { - writer.WriteNull("encryptionKey"); - } - } - if (Optional.IsDefined(_etag)) - { - writer.WritePropertyName("@odata.etag"u8); - writer.WriteStringValue(_etag); - } - writer.WriteEndObject(); - } - - internal static SynonymMap DeserializeSynonymMap(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string format = default; - string synonyms = default; - SearchResourceEncryptionKey encryptionKey = default; - string odataEtag = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("format"u8)) - { - format = property.Value.GetString(); - continue; - } - if (property.NameEquals("synonyms"u8)) - { - synonyms = property.Value.GetString(); - continue; - } - if (property.NameEquals("encryptionKey"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - encryptionKey = null; - continue; - } - encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value); - continue; - } - if (property.NameEquals("@odata.etag"u8)) - { - odataEtag = property.Value.GetString(); - continue; - } - } - return new SynonymMap(name, format, synonyms, encryptionKey, odataEtag); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static SynonymMap FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSynonymMap(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymMap.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymMap.cs deleted file mode 100644 index 4f6cd587015d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymMap.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents a synonym map definition. - public partial class SynonymMap - { - /// Initializes a new instance of . - /// The name of the synonym map. - /// The format of the synonym map. Only the 'solr' format is currently supported. - /// A series of synonym rules in the specified synonym map format. The rules must be separated by newlines. - /// A description of an encryption key that you create in Azure Key Vault. 
This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - /// The ETag of the synonym map. - internal SynonymMap(string name, string format, string synonyms, SearchResourceEncryptionKey encryptionKey, string etag) - { - Name = name; - Format = format; - Synonyms = synonyms; - EncryptionKey = encryptionKey; - _etag = etag; - } - - /// The name of the synonym map. - public string Name { get; set; } - /// A series of synonym rules in the specified synonym map format. The rules must be separated by newlines. - public string Synonyms { get; set; } - /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. - public SearchResourceEncryptionKey EncryptionKey { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymTokenFilter.Serialization.cs deleted file mode 100644 index 9bd6d76229c9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymTokenFilter.Serialization.cs +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class SynonymTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("synonyms"u8); - writer.WriteStartArray(); - foreach (var item in Synonyms) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - if (Optional.IsDefined(IgnoreCase)) - { - writer.WritePropertyName("ignoreCase"u8); - writer.WriteBooleanValue(IgnoreCase.Value); - } - if (Optional.IsDefined(Expand)) - { - writer.WritePropertyName("expand"u8); - writer.WriteBooleanValue(Expand.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static SynonymTokenFilter DeserializeSynonymTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList synonyms = default; - bool? ignoreCase = default; - bool? 
expand = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("synonyms"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - synonyms = array; - continue; - } - if (property.NameEquals("ignoreCase"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - ignoreCase = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("expand"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - expand = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new SynonymTokenFilter(odataType, name, synonyms, ignoreCase, expand); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new SynonymTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeSynonymTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymTokenFilter.cs deleted file mode 100644 index d708bd05d2e9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SynonymTokenFilter.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Matches single or multi-word synonyms in a token stream. This token filter is implemented using Apache Lucene. - public partial class SynonymTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of synonyms in following one of two formats: 1. incredible, unbelievable, fabulous => amazing - all terms on the left side of => symbol will be replaced with all terms on its right side; 2. incredible, unbelievable, fabulous, amazing - comma separated list of equivalent words. Set the expand option to change how this list is interpreted. - /// or is null. - public SynonymTokenFilter(string name, IEnumerable synonyms) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(synonyms, nameof(synonyms)); - - Synonyms = synonyms.ToList(); - ODataType = "#Microsoft.Azure.Search.SynonymTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A list of synonyms in following one of two formats: 1. 
incredible, unbelievable, fabulous => amazing - all terms on the left side of => symbol will be replaced with all terms on its right side; 2. incredible, unbelievable, fabulous, amazing - comma separated list of equivalent words. Set the expand option to change how this list is interpreted. - /// A value indicating whether to case-fold input for matching. Default is false. - /// A value indicating whether all words in the list of synonyms (if => notation is not used) will map to one another. If true, all words in the list of synonyms (if => notation is not used) will map to one another. The following list: incredible, unbelievable, fabulous, amazing is equivalent to: incredible, unbelievable, fabulous, amazing => incredible, unbelievable, fabulous, amazing. If false, the following list: incredible, unbelievable, fabulous, amazing will be equivalent to: incredible, unbelievable, fabulous, amazing => incredible. Default is true. - internal SynonymTokenFilter(string oDataType, string name, IList synonyms, bool? ignoreCase, bool? expand) : base(oDataType, name) - { - Synonyms = synonyms; - IgnoreCase = ignoreCase; - Expand = expand; - ODataType = oDataType ?? "#Microsoft.Azure.Search.SynonymTokenFilter"; - } - /// A value indicating whether to case-fold input for matching. Default is false. - public bool? IgnoreCase { get; set; } - /// A value indicating whether all words in the list of synonyms (if => notation is not used) will map to one another. If true, all words in the list of synonyms (if => notation is not used) will map to one another. The following list: incredible, unbelievable, fabulous, amazing is equivalent to: incredible, unbelievable, fabulous, amazing => incredible, unbelievable, fabulous, amazing. If false, the following list: incredible, unbelievable, fabulous, amazing will be equivalent to: incredible, unbelievable, fabulous, amazing => incredible. Default is true. - public bool? Expand { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringFunction.Serialization.cs deleted file mode 100644 index bb5917b874ef..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringFunction.Serialization.cs +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class TagScoringFunction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("tag"u8); - writer.WriteObjectValue(Parameters); - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - writer.WritePropertyName("fieldName"u8); - writer.WriteStringValue(FieldName); - writer.WritePropertyName("boost"u8); - writer.WriteNumberValue(Boost); - if (Optional.IsDefined(Interpolation)) - { - writer.WritePropertyName("interpolation"u8); - writer.WriteStringValue(Interpolation.Value.ToSerialString()); - } - writer.WriteEndObject(); - } - - internal static TagScoringFunction DeserializeTagScoringFunction(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - TagScoringParameters tag = default; - string type = default; - string fieldName = default; - double boost = default; - ScoringFunctionInterpolation? 
interpolation = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("tag"u8)) - { - tag = TagScoringParameters.DeserializeTagScoringParameters(property.Value); - continue; - } - if (property.NameEquals("type"u8)) - { - type = property.Value.GetString(); - continue; - } - if (property.NameEquals("fieldName"u8)) - { - fieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("boost"u8)) - { - boost = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("interpolation"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - interpolation = property.Value.GetString().ToScoringFunctionInterpolation(); - continue; - } - } - return new TagScoringFunction(type, fieldName, boost, interpolation, tag); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new TagScoringFunction FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeTagScoringFunction(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringFunction.cs deleted file mode 100644 index 8fbada5b23dd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringFunction.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a function that boosts scores of documents with string values matching a given list of tags. - public partial class TagScoringFunction : ScoringFunction - { - /// Initializes a new instance of . - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - /// The name of the field used as input to the scoring function. - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - /// Parameter values for the tag scoring function. - internal TagScoringFunction(string type, string fieldName, double boost, ScoringFunctionInterpolation? interpolation, TagScoringParameters parameters) : base(type, fieldName, boost, interpolation) - { - Parameters = parameters; - Type = type ?? "tag"; - } - - /// Parameter values for the tag scoring function. - public TagScoringParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringParameters.Serialization.cs deleted file mode 100644 index fd3fa1634069..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringParameters.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
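For context, a minimal illustrative sketch of the synonym surface removed above. The SynonymTokenFilter(name, synonyms) constructor and the IgnoreCase and Expand properties appear in the deleted SynonymTokenFilter.cs; the two-argument public SynonymMap constructor is assumed from the hand-written surface and is not part of this diff.

    using Azure.Search.Documents.Indexes.Models;

    // Analysis-time synonym rules (the "=>" form replaces left-side terms with right-side terms).
    var synonymFilter = new SynonymTokenFilter(
        "demo-synonyms",
        new[] { "incredible, unbelievable, fabulous => amazing" })
    {
        IgnoreCase = true,   // case-fold input before matching; default is false
        Expand = true        // only meaningful for comma lists without "=>"; default is true
    };

    // Service-side synonym map in the 'solr' format, rules separated by newlines.
    // Assumed public constructor (not shown in this diff):
    var synonymMap = new SynonymMap("demo-synonym-map", "USA, United States, United States of America");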
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class TagScoringParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("tagsParameter"u8); - writer.WriteStringValue(TagsParameter); - writer.WriteEndObject(); - } - - internal static TagScoringParameters DeserializeTagScoringParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string tagsParameter = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("tagsParameter"u8)) - { - tagsParameter = property.Value.GetString(); - continue; - } - } - return new TagScoringParameters(tagsParameter); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static TagScoringParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeTagScoringParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringParameters.cs deleted file mode 100644 index d6c2df0168d7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TagScoringParameters.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Provides parameter values to a tag scoring function. - public partial class TagScoringParameters - { - /// Initializes a new instance of . - /// The name of the parameter passed in search queries to specify the list of tags to compare against the target field. - /// is null. - public TagScoringParameters(string tagsParameter) - { - Argument.AssertNotNull(tagsParameter, nameof(tagsParameter)); - - TagsParameter = tagsParameter; - } - - /// The name of the parameter passed in search queries to specify the list of tags to compare against the target field. - public string TagsParameter { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TextResult.Serialization.cs deleted file mode 100644 index e367eaec9f0a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextResult.Serialization.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class TextResult - { - internal static TextResult DeserializeTextResult(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double? 
searchScore = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("searchScore"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - searchScore = property.Value.GetDouble(); - continue; - } - } - return new TextResult(searchScore); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static TextResult FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeTextResult(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TextResult.cs deleted file mode 100644 index 10764991dd41..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextResult.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The BM25 or Classic score for the text portion of the query. - public partial class TextResult - { - /// Initializes a new instance of . - internal TextResult() - { - } - - /// Initializes a new instance of . - /// The BM25 or Classic score for the text portion of the query. - internal TextResult(double? searchScore) - { - SearchScore = searchScore; - } - - /// The BM25 or Classic score for the text portion of the query. - public double? SearchScore { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkill.Serialization.cs deleted file mode 100644 index fb7cf2f99c12..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkill.Serialization.cs +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class TextTranslationSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("defaultToLanguageCode"u8); - writer.WriteStringValue(DefaultToLanguageCode.ToString()); - if (Optional.IsDefined(DefaultFromLanguageCode)) - { - if (DefaultFromLanguageCode != null) - { - writer.WritePropertyName("defaultFromLanguageCode"u8); - writer.WriteStringValue(DefaultFromLanguageCode.Value.ToString()); - } - else - { - writer.WriteNull("defaultFromLanguageCode"); - } - } - if (Optional.IsDefined(SuggestedFrom)) - { - if (SuggestedFrom != null) - { - writer.WritePropertyName("suggestedFrom"u8); - writer.WriteStringValue(SuggestedFrom.Value.ToString()); - } - else - { - writer.WriteNull("suggestedFrom"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static TextTranslationSkill DeserializeTextTranslationSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - TextTranslationSkillLanguage defaultToLanguageCode = default; - TextTranslationSkillLanguage? defaultFromLanguageCode = default; - TextTranslationSkillLanguage? 
suggestedFrom = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("defaultToLanguageCode"u8)) - { - defaultToLanguageCode = new TextTranslationSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("defaultFromLanguageCode"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultFromLanguageCode = null; - continue; - } - defaultFromLanguageCode = new TextTranslationSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("suggestedFrom"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - suggestedFrom = null; - continue; - } - suggestedFrom = new TextTranslationSkillLanguage(property.Value.GetString()); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new TextTranslationSkill( - odataType, - name, - description, - context, - inputs, - outputs, - defaultToLanguageCode, - defaultFromLanguageCode, - suggestedFrom); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new TextTranslationSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeTextTranslationSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkill.cs deleted file mode 100644 index 7e5f5755f06b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkill.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill to translate text from one language to another. - public partial class TextTranslationSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. 
- /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The language code to translate documents into for documents that don't specify the to language explicitly. - /// or is null. - public TextTranslationSkill(IEnumerable inputs, IEnumerable outputs, TextTranslationSkillLanguage defaultToLanguageCode) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - DefaultToLanguageCode = defaultToLanguageCode; - ODataType = "#Microsoft.Skills.Text.TranslationSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The language code to translate documents into for documents that don't specify the to language explicitly. - /// The language code to translate documents from for documents that don't specify the from language explicitly. - /// The language code to translate documents from when neither the fromLanguageCode input nor the defaultFromLanguageCode parameter are provided, and the automatic language detection is unsuccessful. Default is `en`. - internal TextTranslationSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, TextTranslationSkillLanguage defaultToLanguageCode, TextTranslationSkillLanguage? defaultFromLanguageCode, TextTranslationSkillLanguage? suggestedFrom) : base(oDataType, name, description, context, inputs, outputs) - { - DefaultToLanguageCode = defaultToLanguageCode; - DefaultFromLanguageCode = defaultFromLanguageCode; - SuggestedFrom = suggestedFrom; - ODataType = oDataType ?? "#Microsoft.Skills.Text.TranslationSkill"; - } - - /// The language code to translate documents into for documents that don't specify the to language explicitly. - public TextTranslationSkillLanguage DefaultToLanguageCode { get; set; } - /// The language code to translate documents from for documents that don't specify the from language explicitly. - public TextTranslationSkillLanguage? DefaultFromLanguageCode { get; set; } - /// The language code to translate documents from when neither the fromLanguageCode input nor the defaultFromLanguageCode parameter are provided, and the automatic language detection is unsuccessful. Default is `en`. - public TextTranslationSkillLanguage? SuggestedFrom { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextWeights.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TextWeights.Serialization.cs deleted file mode 100644 index 08f70409fa72..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextWeights.Serialization.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. 
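The TextTranslationSkill removed just above keeps the shape its doc comments describe: required inputs and outputs plus a required target language, with optional source-language fallbacks. A small sketch; the /document/content source path and the field names are placeholders, not values taken from this diff.

```csharp
// Illustrative skill definition; paths and names below are made up.
using Azure.Search.Documents.Indexes.Models;

var translate = new TextTranslationSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("translatedText") { TargetName = "content_fr" } },
    defaultToLanguageCode: TextTranslationSkillLanguage.Fr)
{
    // Fallback source language for documents that don't specify one explicitly.
    DefaultFromLanguageCode = TextTranslationSkillLanguage.En,
    Context = "/document"
};
```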
-// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class TextWeights : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("weights"u8); - writer.WriteStartObject(); - foreach (var item in Weights) - { - writer.WritePropertyName(item.Key); - writer.WriteNumberValue(item.Value); - } - writer.WriteEndObject(); - writer.WriteEndObject(); - } - - internal static TextWeights DeserializeTextWeights(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IDictionary weights = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("weights"u8)) - { - Dictionary dictionary = new Dictionary(); - foreach (var property0 in property.Value.EnumerateObject()) - { - dictionary.Add(property0.Name, property0.Value.GetDouble()); - } - weights = dictionary; - continue; - } - } - return new TextWeights(weights); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static TextWeights FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeTextWeights(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextWeights.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TextWeights.cs deleted file mode 100644 index b421fc9b2529..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextWeights.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines weights on index fields for which matches should boost scoring in search queries. - public partial class TextWeights - { - /// Initializes a new instance of . - /// The dictionary of per-field weights to boost document scoring. The keys are field names and the values are the weights for each field. - /// is null. - public TextWeights(IDictionary weights) - { - Argument.AssertNotNull(weights, nameof(weights)); - - Weights = weights; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenCharacterKind.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenCharacterKind.Serialization.cs deleted file mode 100644 index e676e936b542..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenCharacterKind.Serialization.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
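TextWeights, deleted just above, is the per-field boost table hung off a scoring profile: keys are searchable field names, values are score multipliers. A sketch with placeholder field names; ScoringProfile's settable TextWeights property is assumed from the broader library surface.

```csharp
// Illustrative only: weight matches in "title" more heavily than in "description".
using System.Collections.Generic;
using Azure.Search.Documents.Indexes.Models;

var weighted = new ScoringProfile("weight-title")
{
    TextWeights = new TextWeights(new Dictionary<string, double>
    {
        ["title"] = 3.0,
        ["description"] = 1.5
    })
};
```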
- -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - internal static partial class TokenCharacterKindExtensions - { - public static string ToSerialString(this TokenCharacterKind value) => value switch - { - TokenCharacterKind.Letter => "letter", - TokenCharacterKind.Digit => "digit", - TokenCharacterKind.Whitespace => "whitespace", - TokenCharacterKind.Punctuation => "punctuation", - TokenCharacterKind.Symbol => "symbol", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown TokenCharacterKind value.") - }; - - public static TokenCharacterKind ToTokenCharacterKind(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "letter")) return TokenCharacterKind.Letter; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "digit")) return TokenCharacterKind.Digit; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "whitespace")) return TokenCharacterKind.Whitespace; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "punctuation")) return TokenCharacterKind.Punctuation; - if (StringComparer.OrdinalIgnoreCase.Equals(value, "symbol")) return TokenCharacterKind.Symbol; - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown TokenCharacterKind value."); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenCharacterKind.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenCharacterKind.cs deleted file mode 100644 index ef1a54d67c05..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenCharacterKind.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Represents classes of characters on which a token filter can operate. - public enum TokenCharacterKind - { - /// Keeps letters in tokens. - Letter, - /// Keeps digits in tokens. - Digit, - /// Keeps whitespace in tokens. - Whitespace, - /// Keeps punctuation in tokens. - Punctuation, - /// Keeps symbols in tokens. - Symbol - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilter.Serialization.cs deleted file mode 100644 index 9eff630717f8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilter.Serialization.cs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
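TokenCharacterKind, removed above, is consumed by the n-gram tokenizers to restrict which character classes survive tokenization. A sketch that assumes the EdgeNGramTokenizer surface (name-only constructor with MinGram, MaxGram, and a TokenChars collection), which this diff does not show.

```csharp
// Assumed EdgeNGramTokenizer shape; only TokenCharacterKind comes from this diff.
using Azure.Search.Documents.Indexes.Models;

var edgeNGrams = new EdgeNGramTokenizer("edge_3_8")
{
    MinGram = 3,
    MaxGram = 8,
    // Keep only letters and digits in the emitted grams.
    TokenChars = { TokenCharacterKind.Letter, TokenCharacterKind.Digit }
};
```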
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class TokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static TokenFilter DeserializeTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "#Microsoft.Azure.Search.AsciiFoldingTokenFilter": return AsciiFoldingTokenFilter.DeserializeAsciiFoldingTokenFilter(element); - case "#Microsoft.Azure.Search.CjkBigramTokenFilter": return CjkBigramTokenFilter.DeserializeCjkBigramTokenFilter(element); - case "#Microsoft.Azure.Search.CommonGramTokenFilter": return CommonGramTokenFilter.DeserializeCommonGramTokenFilter(element); - case "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter": return DictionaryDecompounderTokenFilter.DeserializeDictionaryDecompounderTokenFilter(element); - case "#Microsoft.Azure.Search.EdgeNGramTokenFilter": return EdgeNGramTokenFilter.DeserializeEdgeNGramTokenFilter(element); - case "#Microsoft.Azure.Search.EdgeNGramTokenFilterV2": return EdgeNGramTokenFilter.DeserializeEdgeNGramTokenFilter(element); - case "#Microsoft.Azure.Search.ElisionTokenFilter": return ElisionTokenFilter.DeserializeElisionTokenFilter(element); - case "#Microsoft.Azure.Search.KeepTokenFilter": return KeepTokenFilter.DeserializeKeepTokenFilter(element); - case "#Microsoft.Azure.Search.KeywordMarkerTokenFilter": return KeywordMarkerTokenFilter.DeserializeKeywordMarkerTokenFilter(element); - case "#Microsoft.Azure.Search.LengthTokenFilter": return LengthTokenFilter.DeserializeLengthTokenFilter(element); - case "#Microsoft.Azure.Search.LimitTokenFilter": return LimitTokenFilter.DeserializeLimitTokenFilter(element); - case "#Microsoft.Azure.Search.NGramTokenFilter": return NGramTokenFilter.DeserializeNGramTokenFilter(element); - case "#Microsoft.Azure.Search.NGramTokenFilterV2": return NGramTokenFilter.DeserializeNGramTokenFilter(element); - case "#Microsoft.Azure.Search.PatternCaptureTokenFilter": return PatternCaptureTokenFilter.DeserializePatternCaptureTokenFilter(element); - case "#Microsoft.Azure.Search.PatternReplaceTokenFilter": return PatternReplaceTokenFilter.DeserializePatternReplaceTokenFilter(element); - case "#Microsoft.Azure.Search.PhoneticTokenFilter": return PhoneticTokenFilter.DeserializePhoneticTokenFilter(element); - case "#Microsoft.Azure.Search.ShingleTokenFilter": return ShingleTokenFilter.DeserializeShingleTokenFilter(element); - case "#Microsoft.Azure.Search.SnowballTokenFilter": return SnowballTokenFilter.DeserializeSnowballTokenFilter(element); - case "#Microsoft.Azure.Search.StemmerOverrideTokenFilter": return StemmerOverrideTokenFilter.DeserializeStemmerOverrideTokenFilter(element); - case "#Microsoft.Azure.Search.StemmerTokenFilter": return StemmerTokenFilter.DeserializeStemmerTokenFilter(element); - case "#Microsoft.Azure.Search.StopwordsTokenFilter": return StopwordsTokenFilter.DeserializeStopwordsTokenFilter(element); - case "#Microsoft.Azure.Search.SynonymTokenFilter": return SynonymTokenFilter.DeserializeSynonymTokenFilter(element); - 
case "#Microsoft.Azure.Search.TruncateTokenFilter": return TruncateTokenFilter.DeserializeTruncateTokenFilter(element); - case "#Microsoft.Azure.Search.UniqueTokenFilter": return UniqueTokenFilter.DeserializeUniqueTokenFilter(element); - case "#Microsoft.Azure.Search.WordDelimiterTokenFilter": return WordDelimiterTokenFilter.DeserializeWordDelimiterTokenFilter(element); - } - } - return UnknownTokenFilter.DeserializeUnknownTokenFilter(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static TokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeTokenFilter(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilter.cs deleted file mode 100644 index b5c02b9cc5c5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilter.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Base type for token filters. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , and . - /// - public partial class TokenFilter - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal TokenFilter(string oDataType, string name) - { - ODataType = oDataType; - Name = name; - } - - /// A URI fragment specifying the type of token filter. - internal string ODataType { get; set; } - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - public string Name { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilterName.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilterName.cs deleted file mode 100644 index 4a73a4c73841..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TokenFilterName.cs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines the names of all token filters supported by the search engine. - public readonly partial struct TokenFilterName : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public TokenFilterName(string value) - { - _value = value ?? 
throw new ArgumentNullException(nameof(value)); - } - - private const string ArabicNormalizationValue = "arabic_normalization"; - private const string ApostropheValue = "apostrophe"; - private const string AsciiFoldingValue = "asciifolding"; - private const string CjkBigramValue = "cjk_bigram"; - private const string CjkWidthValue = "cjk_width"; - private const string ClassicValue = "classic"; - private const string CommonGramValue = "common_grams"; - private const string EdgeNGramValue = "edgeNGram_v2"; - private const string ElisionValue = "elision"; - private const string GermanNormalizationValue = "german_normalization"; - private const string HindiNormalizationValue = "hindi_normalization"; - private const string IndicNormalizationValue = "indic_normalization"; - private const string KeywordRepeatValue = "keyword_repeat"; - private const string KStemValue = "kstem"; - private const string LengthValue = "length"; - private const string LimitValue = "limit"; - private const string LowercaseValue = "lowercase"; - private const string NGramValue = "nGram_v2"; - private const string PersianNormalizationValue = "persian_normalization"; - private const string PhoneticValue = "phonetic"; - private const string PorterStemValue = "porter_stem"; - private const string ReverseValue = "reverse"; - private const string ScandinavianNormalizationValue = "scandinavian_normalization"; - private const string ScandinavianFoldingNormalizationValue = "scandinavian_folding"; - private const string ShingleValue = "shingle"; - private const string SnowballValue = "snowball"; - private const string SoraniNormalizationValue = "sorani_normalization"; - private const string StemmerValue = "stemmer"; - private const string StopwordsValue = "stopwords"; - private const string TrimValue = "trim"; - private const string TruncateValue = "truncate"; - private const string UniqueValue = "unique"; - private const string UppercaseValue = "uppercase"; - private const string WordDelimiterValue = "word_delimiter"; - - /// A token filter that applies the Arabic normalizer to normalize the orthography. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ar/ArabicNormalizationFilter.html. - public static TokenFilterName ArabicNormalization { get; } = new TokenFilterName(ArabicNormalizationValue); - /// Strips all characters after an apostrophe (including the apostrophe itself). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/tr/ApostropheFilter.html. - public static TokenFilterName Apostrophe { get; } = new TokenFilterName(ApostropheValue); - /// Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html. - public static TokenFilterName AsciiFolding { get; } = new TokenFilterName(AsciiFoldingValue); - /// Forms bigrams of CJK terms that are generated from the standard tokenizer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKBigramFilter.html. - public static TokenFilterName CjkBigram { get; } = new TokenFilterName(CjkBigramValue); - /// Normalizes CJK width differences. Folds fullwidth ASCII variants into the equivalent basic Latin, and half-width Katakana variants into the equivalent Kana. 
See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKWidthFilter.html. - public static TokenFilterName CjkWidth { get; } = new TokenFilterName(CjkWidthValue); - /// Removes English possessives, and dots from acronyms. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicFilter.html. - public static TokenFilterName Classic { get; } = new TokenFilterName(ClassicValue); - /// Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/commongrams/CommonGramsFilter.html. - public static TokenFilterName CommonGram { get; } = new TokenFilterName(CommonGramValue); - /// Generates n-grams of the given size(s) starting from the front or the back of an input token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.html. - public static TokenFilterName EdgeNGram { get; } = new TokenFilterName(EdgeNGramValue); - /// Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html. - public static TokenFilterName Elision { get; } = new TokenFilterName(ElisionValue); - /// Normalizes German characters according to the heuristics of the German2 snowball algorithm. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/de/GermanNormalizationFilter.html. - public static TokenFilterName GermanNormalization { get; } = new TokenFilterName(GermanNormalizationValue); - /// Normalizes text in Hindi to remove some differences in spelling variations. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/hi/HindiNormalizationFilter.html. - public static TokenFilterName HindiNormalization { get; } = new TokenFilterName(HindiNormalizationValue); - /// Normalizes the Unicode representation of text in Indian languages. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/in/IndicNormalizationFilter.html. - public static TokenFilterName IndicNormalization { get; } = new TokenFilterName(IndicNormalizationValue); - /// Emits each incoming token twice, once as keyword and once as non-keyword. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/KeywordRepeatFilter.html. - public static TokenFilterName KeywordRepeat { get; } = new TokenFilterName(KeywordRepeatValue); - /// A high-performance kstem filter for English. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/en/KStemFilter.html. - public static TokenFilterName KStem { get; } = new TokenFilterName(KStemValue); - /// Removes words that are too long or too short. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LengthFilter.html. - public static TokenFilterName Length { get; } = new TokenFilterName(LengthValue); - /// Limits the number of tokens while indexing. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LimitTokenCountFilter.html. - public static TokenFilterName Limit { get; } = new TokenFilterName(LimitValue); - /// Normalizes token text to lower case. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html. 
- public static TokenFilterName Lowercase { get; } = new TokenFilterName(LowercaseValue); - /// Generates n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenFilter.html. - public static TokenFilterName NGram { get; } = new TokenFilterName(NGramValue); - /// Applies normalization for Persian. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/fa/PersianNormalizationFilter.html. - public static TokenFilterName PersianNormalization { get; } = new TokenFilterName(PersianNormalizationValue); - /// Create tokens for phonetic matches. See https://lucene.apache.org/core/4_10_3/analyzers-phonetic/org/apache/lucene/analysis/phonetic/package-tree.html. - public static TokenFilterName Phonetic { get; } = new TokenFilterName(PhoneticValue); - /// Uses the Porter stemming algorithm to transform the token stream. See http://tartarus.org/~martin/PorterStemmer. - public static TokenFilterName PorterStem { get; } = new TokenFilterName(PorterStemValue); - /// Reverses the token string. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html. - public static TokenFilterName Reverse { get; } = new TokenFilterName(ReverseValue); - /// Normalizes use of the interchangeable Scandinavian characters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianNormalizationFilter.html. - public static TokenFilterName ScandinavianNormalization { get; } = new TokenFilterName(ScandinavianNormalizationValue); - /// Folds Scandinavian characters åÅäæÄÆ->a and öÖøØ->o. It also discriminates against use of double vowels aa, ae, ao, oe and oo, leaving just the first one. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianFoldingFilter.html. - public static TokenFilterName ScandinavianFoldingNormalization { get; } = new TokenFilterName(ScandinavianFoldingNormalizationValue); - /// Creates combinations of tokens as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/shingle/ShingleFilter.html. - public static TokenFilterName Shingle { get; } = new TokenFilterName(ShingleValue); - /// A filter that stems words using a Snowball-generated stemmer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/snowball/SnowballFilter.html. - public static TokenFilterName Snowball { get; } = new TokenFilterName(SnowballValue); - /// Normalizes the Unicode representation of Sorani text. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ckb/SoraniNormalizationFilter.html. - public static TokenFilterName SoraniNormalization { get; } = new TokenFilterName(SoraniNormalizationValue); - /// Language specific stemming filter. See https://learn.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#TokenFilters. - public static TokenFilterName Stemmer { get; } = new TokenFilterName(StemmerValue); - /// Removes stop words from a token stream. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopFilter.html. - public static TokenFilterName Stopwords { get; } = new TokenFilterName(StopwordsValue); - /// Trims leading and trailing whitespace from tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TrimFilter.html. 
- public static TokenFilterName Trim { get; } = new TokenFilterName(TrimValue); - /// Truncates the terms to a specific length. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilter.html. - public static TokenFilterName Truncate { get; } = new TokenFilterName(TruncateValue); - /// Filters out tokens with same text as the previous token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/RemoveDuplicatesTokenFilter.html. - public static TokenFilterName Unique { get; } = new TokenFilterName(UniqueValue); - /// Normalizes token text to upper case. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html. - public static TokenFilterName Uppercase { get; } = new TokenFilterName(UppercaseValue); - /// Splits words into subwords and performs optional transformations on subword groups. - public static TokenFilterName WordDelimiter { get; } = new TokenFilterName(WordDelimiterValue); - /// Determines if two values are the same. - public static bool operator ==(TokenFilterName left, TokenFilterName right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(TokenFilterName left, TokenFilterName right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator TokenFilterName(string value) => new TokenFilterName(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is TokenFilterName other && Equals(other); - /// - public bool Equals(TokenFilterName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TruncateTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TruncateTokenFilter.Serialization.cs deleted file mode 100644 index 85eb8b4f3c9c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TruncateTokenFilter.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class TruncateTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Length)) - { - writer.WritePropertyName("length"u8); - writer.WriteNumberValue(Length.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static TruncateTokenFilter DeserializeTruncateTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? 
length = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("length"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - length = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new TruncateTokenFilter(odataType, name, length); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new TruncateTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeTruncateTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TruncateTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/TruncateTokenFilter.cs deleted file mode 100644 index 42ee37137509..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TruncateTokenFilter.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Truncates the terms to a specific length. This token filter is implemented using Apache Lucene. - public partial class TruncateTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public TruncateTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.TruncateTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The length at which terms will be truncated. Default and maximum is 300. - internal TruncateTokenFilter(string oDataType, string name, int? length) : base(oDataType, name) - { - Length = length; - ODataType = oDataType ?? "#Microsoft.Azure.Search.TruncateTokenFilter"; - } - - /// The length at which terms will be truncated. Default and maximum is 300. - public int? Length { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UaxUrlEmailTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UaxUrlEmailTokenizer.Serialization.cs deleted file mode 100644 index 6e0b91dabb00..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UaxUrlEmailTokenizer.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
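The TokenFilterName catalog and TruncateTokenFilter removed above are normally wired up through a custom analyzer on the index: built-in filters are referenced by TokenFilterName, custom filter instances by the name they were registered under. A sketch; the SearchIndex collections and the CustomAnalyzer constructor are assumed from the broader library surface rather than from this diff.

```csharp
// Illustrative index fragment: one custom filter plus built-in filter names.
using Azure.Search.Documents.Indexes.Models;

var index = new SearchIndex("docs")
{
    TokenFilters = { new TruncateTokenFilter("truncate_50") { Length = 50 } },
    Analyzers =
    {
        new CustomAnalyzer("compact_text", LexicalTokenizerName.Standard)
        {
            // Built-in filters by well-known name, then the custom filter above;
            // TokenFilterName converts implicitly from string.
            TokenFilters = { TokenFilterName.Lowercase, TokenFilterName.AsciiFolding, "truncate_50" }
        }
    }
};
```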
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class UaxUrlEmailTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(MaxTokenLength)) - { - writer.WritePropertyName("maxTokenLength"u8); - writer.WriteNumberValue(MaxTokenLength.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static UaxUrlEmailTokenizer DeserializeUaxUrlEmailTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - int? maxTokenLength = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("maxTokenLength"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - maxTokenLength = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new UaxUrlEmailTokenizer(odataType, name, maxTokenLength); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UaxUrlEmailTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUaxUrlEmailTokenizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UaxUrlEmailTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UaxUrlEmailTokenizer.cs deleted file mode 100644 index 429e17a879a0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UaxUrlEmailTokenizer.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Tokenizes urls and emails as one token. This tokenizer is implemented using Apache Lucene. - public partial class UaxUrlEmailTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public UaxUrlEmailTokenizer(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.UaxUrlEmailTokenizer"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. 
- internal UaxUrlEmailTokenizer(string oDataType, string name, int? maxTokenLength) : base(oDataType, name) - { - MaxTokenLength = maxTokenLength; - ODataType = oDataType ?? "#Microsoft.Azure.Search.UaxUrlEmailTokenizer"; - } - - /// The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. - public int? MaxTokenLength { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UniqueTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UniqueTokenFilter.Serialization.cs deleted file mode 100644 index bce5f7022be8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UniqueTokenFilter.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class UniqueTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(OnlyOnSamePosition)) - { - writer.WritePropertyName("onlyOnSamePosition"u8); - writer.WriteBooleanValue(OnlyOnSamePosition.Value); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static UniqueTokenFilter DeserializeUniqueTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - bool? onlyOnSamePosition = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("onlyOnSamePosition"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - onlyOnSamePosition = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new UniqueTokenFilter(odataType, name, onlyOnSamePosition); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UniqueTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUniqueTokenFilter(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UniqueTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UniqueTokenFilter.cs deleted file mode 100644 index 55d5b9653eab..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UniqueTokenFilter.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Filters out tokens with same text as the previous token. This token filter is implemented using Apache Lucene. 
- public partial class UniqueTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public UniqueTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ODataType = "#Microsoft.Azure.Search.UniqueTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A value indicating whether to remove duplicates only at the same position. Default is false. - internal UniqueTokenFilter(string oDataType, string name, bool? onlyOnSamePosition) : base(oDataType, name) - { - OnlyOnSamePosition = onlyOnSamePosition; - ODataType = oDataType ?? "#Microsoft.Azure.Search.UniqueTokenFilter"; - } - - /// A value indicating whether to remove duplicates only at the same position. Default is false. - public bool? OnlyOnSamePosition { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCharFilter.Serialization.cs deleted file mode 100644 index 816a3e6b6959..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCharFilter.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownCharFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static UnknownCharFilter DeserializeUnknownCharFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new UnknownCharFilter(odataType, name); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownCharFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownCharFilter(document.RootElement); - } - - /// Convert into a . 
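UaxUrlEmailTokenizer and UniqueTokenFilter, deleted above, combine naturally into an analyzer that keeps URLs and email addresses as single tokens and drops repeated tokens. Another sketch under the same assumptions about the SearchIndex and CustomAnalyzer surface.

```csharp
// Illustrative only: tokenize URLs/emails whole, lowercase, then de-duplicate.
using Azure.Search.Documents.Indexes.Models;

var linksIndex = new SearchIndex("links")
{
    Tokenizers = { new UaxUrlEmailTokenizer("url_email") { MaxTokenLength = 255 } },
    TokenFilters = { new UniqueTokenFilter("dedupe") { OnlyOnSamePosition = false } },
    Analyzers =
    {
        // The custom tokenizer and filter are referenced by the names given above.
        new CustomAnalyzer("link_analyzer", "url_email")
        {
            TokenFilters = { TokenFilterName.Lowercase, "dedupe" }
        }
    }
};
```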
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCharFilter.cs deleted file mode 100644 index e51658211389..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCharFilter.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of CharFilter. - internal partial class UnknownCharFilter : CharFilter - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of char filter. - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal UnknownCharFilter(string oDataType, string name) : base(oDataType, name) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCognitiveServicesAccount.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCognitiveServicesAccount.Serialization.cs deleted file mode 100644 index 9795bb6b6b0b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCognitiveServicesAccount.Serialization.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownCognitiveServicesAccount : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - writer.WriteEndObject(); - } - - internal static UnknownCognitiveServicesAccount DeserializeUnknownCognitiveServicesAccount(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - string description = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - } - return new UnknownCognitiveServicesAccount(odataType, description); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownCognitiveServicesAccount FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownCognitiveServicesAccount(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCognitiveServicesAccount.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCognitiveServicesAccount.cs deleted file mode 100644 index 9de01138d09a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownCognitiveServicesAccount.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of CognitiveServicesAccount. - internal partial class UnknownCognitiveServicesAccount : CognitiveServicesAccount - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of Azure AI service resource attached to a skillset. - /// Description of the Azure AI service resource attached to a skillset. - internal UnknownCognitiveServicesAccount(string oDataType, string description) : base(oDataType, description) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataChangeDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataChangeDetectionPolicy.Serialization.cs deleted file mode 100644 index e8fa65e45478..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataChangeDetectionPolicy.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownDataChangeDetectionPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static UnknownDataChangeDetectionPolicy DeserializeUnknownDataChangeDetectionPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new UnknownDataChangeDetectionPolicy(odataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownDataChangeDetectionPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownDataChangeDetectionPolicy(document.RootElement); - } - - /// Convert into a . 
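Every Unknown* model deleted in this block follows the same shape: the deserializer reads the "@odata.type" discriminator, returns a known derived type when it recognizes the value, and otherwise keeps the payload in an internal Unknown placeholder so unrecognized service types still round-trip. A minimal sketch of that fallback pattern, using hypothetical types (FilterBase, MappingFilter, UnknownFilter) rather than the generated classes:

    using System.Text.Json;

    public abstract class FilterBase
    {
        public string ODataType { get; set; }
        public string Name { get; set; }
    }

    // One recognized discriminator value and one catch-all for everything else.
    public sealed class MappingFilter : FilterBase { }
    public sealed class UnknownFilter : FilterBase { }

    public static class FilterSerialization
    {
        public static FilterBase Deserialize(JsonElement element)
        {
            if (element.ValueKind == JsonValueKind.Null)
            {
                return null;
            }

            string odataType = element.TryGetProperty("@odata.type", out JsonElement type) ? type.GetString() : null;
            string name = element.TryGetProperty("name", out JsonElement n) ? n.GetString() : null;

            // Dispatch on the discriminator; anything unrecognized becomes the Unknown placeholder.
            FilterBase result = odataType switch
            {
                "#Microsoft.Azure.Search.MappingCharFilter" => new MappingFilter(),
                _ => new UnknownFilter()
            };
            result.ODataType = odataType ?? "Unknown";
            result.Name = name;
            return result;
        }
    }
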
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataChangeDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataChangeDetectionPolicy.cs deleted file mode 100644 index 5bb9124cabd1..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataChangeDetectionPolicy.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of DataChangeDetectionPolicy. - internal partial class UnknownDataChangeDetectionPolicy : DataChangeDetectionPolicy - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of data change detection policy. - internal UnknownDataChangeDetectionPolicy(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataDeletionDetectionPolicy.Serialization.cs deleted file mode 100644 index d5804aab4cae..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataDeletionDetectionPolicy.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownDataDeletionDetectionPolicy : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static UnknownDataDeletionDetectionPolicy DeserializeUnknownDataDeletionDetectionPolicy(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new UnknownDataDeletionDetectionPolicy(odataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownDataDeletionDetectionPolicy FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownDataDeletionDetectionPolicy(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataDeletionDetectionPolicy.cs deleted file mode 100644 index 45bef6c3d413..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownDataDeletionDetectionPolicy.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of DataDeletionDetectionPolicy. - internal partial class UnknownDataDeletionDetectionPolicy : DataDeletionDetectionPolicy - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of data deletion detection policy. - internal UnknownDataDeletionDetectionPolicy(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalAnalyzer.Serialization.cs deleted file mode 100644 index 0db371215532..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalAnalyzer.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownLexicalAnalyzer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static UnknownLexicalAnalyzer DeserializeUnknownLexicalAnalyzer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new UnknownLexicalAnalyzer(odataType, name); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownLexicalAnalyzer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownLexicalAnalyzer(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalAnalyzer.cs deleted file mode 100644 index 6e7bb7c85ed4..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalAnalyzer.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of LexicalAnalyzer. - internal partial class UnknownLexicalAnalyzer : LexicalAnalyzer - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of analyzer. - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal UnknownLexicalAnalyzer(string oDataType, string name) : base(oDataType, name) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalNormalizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalNormalizer.Serialization.cs deleted file mode 100644 index 61a3da87b3b5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalNormalizer.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownLexicalNormalizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static UnknownLexicalNormalizer DeserializeUnknownLexicalNormalizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new UnknownLexicalNormalizer(odataType, name); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownLexicalNormalizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownLexicalNormalizer(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalNormalizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalNormalizer.cs deleted file mode 100644 index 0805ebc85bc6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalNormalizer.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of LexicalNormalizer. - internal partial class UnknownLexicalNormalizer : LexicalNormalizer - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of normalizer. - /// The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. It cannot end in '.microsoft' nor '.lucene', nor be named 'asciifolding', 'standard', 'lowercase', 'uppercase', or 'elision'. - internal UnknownLexicalNormalizer(string oDataType, string name) : base(oDataType, name) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalTokenizer.Serialization.cs deleted file mode 100644 index 2b19106b4303..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalTokenizer.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownLexicalTokenizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static UnknownLexicalTokenizer DeserializeUnknownLexicalTokenizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new UnknownLexicalTokenizer(odataType, name); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownLexicalTokenizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownLexicalTokenizer(document.RootElement); - } - - /// Convert into a . 
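The lexical analyzer, normalizer, tokenizer, and token filter placeholders removed here back the index-definition surface that callers normally reach through the public models. As a rough usage sketch (assuming the released Azure.Search.Documents.Indexes.Models surface; the index name, analyzer name, and filter choice are made up), a custom analyzer composes a known tokenizer with token filters like so:

    using Azure.Search.Documents.Indexes.Models;

    // Hypothetical index with a custom analyzer built from a standard tokenizer
    // plus lowercase and ASCII-folding token filters.
    var index = new SearchIndex("hotels-sample")
    {
        Analyzers =
        {
            new CustomAnalyzer("folded_lowercase", LexicalTokenizerName.Standard)
            {
                TokenFilters = { TokenFilterName.Lowercase, TokenFilterName.AsciiFolding }
            }
        }
    };

The Unknown* variants only come into play when the service returns an analyzer, tokenizer, normalizer, or token filter whose @odata.type this client version does not recognize.
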
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalTokenizer.cs deleted file mode 100644 index 9226cd60fbe0..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownLexicalTokenizer.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of LexicalTokenizer. - internal partial class UnknownLexicalTokenizer : LexicalTokenizer - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal UnknownLexicalTokenizer(string oDataType, string name) : base(oDataType, name) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownScoringFunction.Serialization.cs deleted file mode 100644 index 94a745105580..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownScoringFunction.Serialization.cs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownScoringFunction : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("type"u8); - writer.WriteStringValue(Type); - writer.WritePropertyName("fieldName"u8); - writer.WriteStringValue(FieldName); - writer.WritePropertyName("boost"u8); - writer.WriteNumberValue(Boost); - if (Optional.IsDefined(Interpolation)) - { - writer.WritePropertyName("interpolation"u8); - writer.WriteStringValue(Interpolation.Value.ToSerialString()); - } - writer.WriteEndObject(); - } - - internal static UnknownScoringFunction DeserializeUnknownScoringFunction(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string type = "Unknown"; - string fieldName = default; - double boost = default; - ScoringFunctionInterpolation? interpolation = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("type"u8)) - { - type = property.Value.GetString(); - continue; - } - if (property.NameEquals("fieldName"u8)) - { - fieldName = property.Value.GetString(); - continue; - } - if (property.NameEquals("boost"u8)) - { - boost = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("interpolation"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - interpolation = property.Value.GetString().ToScoringFunctionInterpolation(); - continue; - } - } - return new UnknownScoringFunction(type, fieldName, boost, interpolation); - } - - /// Deserializes the model from a raw response. 
- /// The response to deserialize the model from. - internal static new UnknownScoringFunction FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownScoringFunction(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownScoringFunction.cs deleted file mode 100644 index be7ecea1451b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownScoringFunction.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of ScoringFunction. - internal partial class UnknownScoringFunction : ScoringFunction - { - /// Initializes a new instance of . - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - /// The name of the field used as input to the scoring function. - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - internal UnknownScoringFunction(string type, string fieldName, double boost, ScoringFunctionInterpolation? interpolation) : base(type, fieldName, boost, interpolation) - { - Type = type ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerDataIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerDataIdentity.Serialization.cs deleted file mode 100644 index b72d66c1d05b..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerDataIdentity.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownSearchIndexerDataIdentity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static UnknownSearchIndexerDataIdentity DeserializeUnknownSearchIndexerDataIdentity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new UnknownSearchIndexerDataIdentity(odataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static new UnknownSearchIndexerDataIdentity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownSearchIndexerDataIdentity(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerDataIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerDataIdentity.cs deleted file mode 100644 index 157a2a33f0ae..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerDataIdentity.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of SearchIndexerDataIdentity. - internal partial class UnknownSearchIndexerDataIdentity : SearchIndexerDataIdentity - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of identity. - internal UnknownSearchIndexerDataIdentity(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerSkill.Serialization.cs deleted file mode 100644 index d11847d96a18..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerSkill.Serialization.cs +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownSearchIndexerSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static UnknownSearchIndexerSkill DeserializeUnknownSearchIndexerSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new UnknownSearchIndexerSkill( - odataType, - name, - description, - context, - inputs, - outputs); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownSearchIndexerSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownSearchIndexerSkill(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerSkill.cs deleted file mode 100644 index b64f1b8f6216..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSearchIndexerSkill.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of SearchIndexerSkill. - internal partial class UnknownSearchIndexerSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - internal UnknownSearchIndexerSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs) : base(oDataType, name, description, context, inputs, outputs) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSimilarity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSimilarity.Serialization.cs deleted file mode 100644 index 3499b7c9818e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSimilarity.Serialization.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownSimilarity : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WriteEndObject(); - } - - internal static UnknownSimilarity DeserializeUnknownSimilarity(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - } - return new UnknownSimilarity(odataType); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static new UnknownSimilarity FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownSimilarity(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSimilarity.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSimilarity.cs deleted file mode 100644 index cd61c179185e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownSimilarity.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of Similarity. - internal partial class UnknownSimilarity : SimilarityAlgorithm - { - /// Initializes a new instance of . - /// - internal UnknownSimilarity(string oDataType) : base(oDataType) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownTokenFilter.Serialization.cs deleted file mode 100644 index 05977962f831..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownTokenFilter.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static UnknownTokenFilter DeserializeUnknownTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string odataType = "Unknown"; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new UnknownTokenFilter(odataType, name); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownTokenFilter(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownTokenFilter.cs deleted file mode 100644 index 6e5c1f4dcd72..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownTokenFilter.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of TokenFilter. - internal partial class UnknownTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - internal UnknownTokenFilter(string oDataType, string name) : base(oDataType, name) - { - ODataType = oDataType ?? "Unknown"; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorQuery.Serialization.cs deleted file mode 100644 index d63f388ad166..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorQuery.Serialization.cs +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownVectorQuery : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(KNearestNeighborsCount)) - { - writer.WritePropertyName("k"u8); - writer.WriteNumberValue(KNearestNeighborsCount.Value); - } - if (Optional.IsDefined(FieldsRaw)) - { - writer.WritePropertyName("fields"u8); - writer.WriteStringValue(FieldsRaw); - } - if (Optional.IsDefined(Exhaustive)) - { - writer.WritePropertyName("exhaustive"u8); - writer.WriteBooleanValue(Exhaustive.Value); - } - if (Optional.IsDefined(Oversampling)) - { - writer.WritePropertyName("oversampling"u8); - writer.WriteNumberValue(Oversampling.Value); - } - if (Optional.IsDefined(Weight)) - { - writer.WritePropertyName("weight"u8); - writer.WriteNumberValue(Weight.Value); - } - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteObjectValue(Threshold); - } - if (Optional.IsDefined(FilterOverride)) - { - writer.WritePropertyName("filterOverride"u8); - writer.WriteStringValue(FilterOverride); - } - writer.WriteEndObject(); - } - - internal static UnknownVectorQuery DeserializeUnknownVectorQuery(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - VectorQueryKind kind = "Unknown"; - int? k = default; - string fields = default; - bool? exhaustive = default; - double? oversampling = default; - float? 
weight = default; - VectorThreshold threshold = default; - string filterOverride = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("kind"u8)) - { - kind = new VectorQueryKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("k"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - k = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("fields"u8)) - { - fields = property.Value.GetString(); - continue; - } - if (property.NameEquals("exhaustive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - exhaustive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("oversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - oversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("weight"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - weight = property.Value.GetSingle(); - continue; - } - if (property.NameEquals("threshold"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - threshold = VectorThreshold.DeserializeVectorThreshold(property.Value); - continue; - } - if (property.NameEquals("filterOverride"u8)) - { - filterOverride = property.Value.GetString(); - continue; - } - } - return new UnknownVectorQuery( - kind, - k, - fields, - exhaustive, - oversampling, - weight, - threshold, - filterOverride); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownVectorQuery FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownVectorQuery(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorQuery.cs deleted file mode 100644 index e9909cf5d413..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorQuery.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of VectorQuery. - internal partial class UnknownVectorQuery : VectorQuery - { - /// Initializes a new instance of . - /// The kind of vector query being performed. - /// Number of nearest neighbors to return as top hits. - /// Vector Fields of type Collection(Edm.Single) to be included in the vector searched. - /// When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. Useful for scenarios where exact matches are critical, such as determining ground truth values. - /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. - /// Relative weight of the vector query when compared to other vector query and/or the text query within the same search request. 
This value is used when combining the results of multiple ranking lists produced by the different vector queries and/or the results retrieved through the text query. The higher the weight, the higher the documents that matched that query will be in the final ranking. Default is 1.0 and the value needs to be a positive number larger than zero. - /// - /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The OData filter expression to apply to this specific vector query. If no filter expression is defined at the vector level, the expression defined in the top level filter parameter is used instead. - internal UnknownVectorQuery(VectorQueryKind kind, int? kNearestNeighborsCount, string fieldsRaw, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride) : base(kind, kNearestNeighborsCount, fieldsRaw, exhaustive, oversampling, weight, threshold, filterOverride) - { - Kind = kind; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchAlgorithmConfiguration.Serialization.cs deleted file mode 100644 index 82607187e8c3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchAlgorithmConfiguration.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownVectorSearchAlgorithmConfiguration : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static UnknownVectorSearchAlgorithmConfiguration DeserializeUnknownVectorSearchAlgorithmConfiguration(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - VectorSearchAlgorithmKind kind = "Unknown"; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchAlgorithmKind(property.Value.GetString()); - continue; - } - } - return new UnknownVectorSearchAlgorithmConfiguration(name, kind); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownVectorSearchAlgorithmConfiguration FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownVectorSearchAlgorithmConfiguration(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchAlgorithmConfiguration.cs deleted file mode 100644 index 0b83b982774a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchAlgorithmConfiguration.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of VectorSearchAlgorithmConfiguration. - internal partial class UnknownVectorSearchAlgorithmConfiguration : VectorSearchAlgorithmConfiguration - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of algorithm being configured for use with vector search. - internal UnknownVectorSearchAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind) : base(name, kind) - { - Kind = kind; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.Serialization.cs deleted file mode 100644 index 8b2b81021366..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.Serialization.cs +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownVectorSearchCompression : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(CompressionName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(RerankWithOriginalVectors)) - { - writer.WritePropertyName("rerankWithOriginalVectors"u8); - writer.WriteBooleanValue(RerankWithOriginalVectors.Value); - } - if (Optional.IsDefined(DefaultOversampling)) - { - if (DefaultOversampling != null) - { - writer.WritePropertyName("defaultOversampling"u8); - writer.WriteNumberValue(DefaultOversampling.Value); - } - else - { - writer.WriteNull("defaultOversampling"); - } - } - if (Optional.IsDefined(RescoringOptions)) - { - if (RescoringOptions != null) - { - writer.WritePropertyName("rescoringOptions"u8); - writer.WriteObjectValue(RescoringOptions); - } - else - { - writer.WriteNull("rescoringOptions"); - } - } - if (Optional.IsDefined(TruncationDimension)) - { - if (TruncationDimension != null) - { - writer.WritePropertyName("truncationDimension"u8); - writer.WriteNumberValue(TruncationDimension.Value); - } - else - { - writer.WriteNull("truncationDimension"); - } - } - writer.WriteEndObject(); - } - - internal static UnknownVectorSearchCompression DeserializeUnknownVectorSearchCompression(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - VectorSearchCompressionKind kind = "Unknown"; - bool? rerankWithOriginalVectors = default; - double? defaultOversampling = default; - RescoringOptions rescoringOptions = default; - int? truncationDimension = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchCompressionKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("rerankWithOriginalVectors"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - rerankWithOriginalVectors = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("defaultOversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - defaultOversampling = null; - continue; - } - defaultOversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("rescoringOptions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - rescoringOptions = null; - continue; - } - rescoringOptions = RescoringOptions.DeserializeRescoringOptions(property.Value); - continue; - } - if (property.NameEquals("truncationDimension"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - truncationDimension = null; - continue; - } - truncationDimension = property.Value.GetInt32(); - continue; - } - } - return new UnknownVectorSearchCompression( - name, - kind, - rerankWithOriginalVectors, - defaultOversampling, - rescoringOptions, - truncationDimension); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
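The compression serializer above treats its nullable settings as three-state: a property that was never set is omitted, a property explicitly set to null is written as JSON null (defaultOversampling, rescoringOptions, truncationDimension), and a value is written as-is. A compact illustration of that convention using a hypothetical options type, not the generated model:

    using System.IO;
    using System.Text;
    using System.Text.Json;

    public sealed class CompressionSketch
    {
        public bool? RerankWithOriginalVectors { get; set; }   // unset => omit the property
        public bool OversamplingProvided { get; set; }         // did the caller touch the setting at all?
        public double? DefaultOversampling { get; set; }       // null while provided => write JSON null

        public string ToJson()
        {
            using var stream = new MemoryStream();
            using (var writer = new Utf8JsonWriter(stream))
            {
                writer.WriteStartObject();
                if (RerankWithOriginalVectors.HasValue)
                {
                    writer.WriteBoolean("rerankWithOriginalVectors", RerankWithOriginalVectors.Value);
                }
                if (OversamplingProvided)
                {
                    if (DefaultOversampling.HasValue)
                    {
                        writer.WriteNumber("defaultOversampling", DefaultOversampling.Value);
                    }
                    else
                    {
                        writer.WriteNull("defaultOversampling");
                    }
                }
                writer.WriteEndObject();
            }
            return Encoding.UTF8.GetString(stream.ToArray());
        }
    }
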
- internal static new UnknownVectorSearchCompression FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownVectorSearchCompression(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.cs deleted file mode 100644 index 612fd27720b5..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of VectorSearchCompression. - internal partial class UnknownVectorSearchCompression : VectorSearchCompression - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. - /// Contains the options for rescoring. - /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. - internal UnknownVectorSearchCompression(string compressionName, VectorSearchCompressionKind kind, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? truncationDimension) : base(compressionName, kind, rerankWithOriginalVectors, defaultOversampling, rescoringOptions, truncationDimension) - { - Kind = kind; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchVectorizer.Serialization.cs deleted file mode 100644 index c40b834241ad..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchVectorizer.Serialization.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownVectorSearchVectorizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(VectorizerName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static UnknownVectorSearchVectorizer DeserializeUnknownVectorSearchVectorizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - VectorSearchVectorizerKind kind = "Unknown"; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchVectorizerKind(property.Value.GetString()); - continue; - } - } - return new UnknownVectorSearchVectorizer(name, kind); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownVectorSearchVectorizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownVectorSearchVectorizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchVectorizer.cs deleted file mode 100644 index 6ade9464f87f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchVectorizer.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of VectorSearchVectorizer. - internal partial class UnknownVectorSearchVectorizer : VectorSearchVectorizer - { - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// The name of the kind of vectorization method being configured for use with vector search. - internal UnknownVectorSearchVectorizer(string vectorizerName, VectorSearchVectorizerKind kind) : base(vectorizerName, kind) - { - Kind = kind; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorThreshold.Serialization.cs deleted file mode 100644 index 1ba081a81126..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorThreshold.Serialization.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - internal partial class UnknownVectorThreshold : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static UnknownVectorThreshold DeserializeUnknownVectorThreshold(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - VectorThresholdKind kind = "Unknown"; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("kind"u8)) - { - kind = new VectorThresholdKind(property.Value.GetString()); - continue; - } - } - return new UnknownVectorThreshold(kind); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new UnknownVectorThreshold FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeUnknownVectorThreshold(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorThreshold.cs deleted file mode 100644 index d67082e14cae..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorThreshold.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// Unknown version of VectorThreshold. - internal partial class UnknownVectorThreshold : VectorThreshold - { - /// Initializes a new instance of . - /// The kind of threshold used to filter vector queries. - internal UnknownVectorThreshold(VectorThresholdKind kind) : base(kind) - { - Kind = kind; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQuery.Serialization.cs deleted file mode 100644 index ac3e6b6903c9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQuery.Serialization.cs +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorQuery : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(KNearestNeighborsCount)) - { - writer.WritePropertyName("k"u8); - writer.WriteNumberValue(KNearestNeighborsCount.Value); - } - if (Optional.IsDefined(FieldsRaw)) - { - writer.WritePropertyName("fields"u8); - writer.WriteStringValue(FieldsRaw); - } - if (Optional.IsDefined(Exhaustive)) - { - writer.WritePropertyName("exhaustive"u8); - writer.WriteBooleanValue(Exhaustive.Value); - } - if (Optional.IsDefined(Oversampling)) - { - writer.WritePropertyName("oversampling"u8); - writer.WriteNumberValue(Oversampling.Value); - } - if (Optional.IsDefined(Weight)) - { - writer.WritePropertyName("weight"u8); - writer.WriteNumberValue(Weight.Value); - } - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteObjectValue(Threshold); - } - if (Optional.IsDefined(FilterOverride)) - { - writer.WritePropertyName("filterOverride"u8); - writer.WriteStringValue(FilterOverride); - } - writer.WriteEndObject(); - } - - internal static VectorQuery DeserializeVectorQuery(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("kind", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "imageBinary": return VectorizableImageBinaryQuery.DeserializeVectorizableImageBinaryQuery(element); - case "imageUrl": return VectorizableImageUrlQuery.DeserializeVectorizableImageUrlQuery(element); - case "text": return VectorizableTextQuery.DeserializeVectorizableTextQuery(element); - case "vector": return VectorizedQuery.DeserializeVectorizedQuery(element); - } - } - return UnknownVectorQuery.DeserializeUnknownVectorQuery(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static VectorQuery FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorQuery(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQuery.cs deleted file mode 100644 index 94a1b9283bef..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQuery.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// - /// The query parameters for vector and hybrid search queries. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - public abstract partial class VectorQuery - { - /// Initializes a new instance of . 
- protected VectorQuery() - { - } - - /// Initializes a new instance of . - /// The kind of vector query being performed. - /// Number of nearest neighbors to return as top hits. - /// Vector Fields of type Collection(Edm.Single) to be included in the vector searched. - /// When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. Useful for scenarios where exact matches are critical, such as determining ground truth values. - /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. - /// Relative weight of the vector query when compared to other vector query and/or the text query within the same search request. This value is used when combining the results of multiple ranking lists produced by the different vector queries and/or the results retrieved through the text query. The higher the weight, the higher the documents that matched that query will be in the final ranking. Default is 1.0 and the value needs to be a positive number larger than zero. - /// - /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The OData filter expression to apply to this specific vector query. If no filter expression is defined at the vector level, the expression defined in the top level filter parameter is used instead. - internal VectorQuery(VectorQueryKind kind, int? kNearestNeighborsCount, string fieldsRaw, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride) - { - Kind = kind; - KNearestNeighborsCount = kNearestNeighborsCount; - FieldsRaw = fieldsRaw; - Exhaustive = exhaustive; - Oversampling = oversampling; - Weight = weight; - Threshold = threshold; - FilterOverride = filterOverride; - } - - /// The kind of vector query being performed. - internal VectorQueryKind Kind { get; set; } - /// Number of nearest neighbors to return as top hits. - public int? KNearestNeighborsCount { get; set; } - /// When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. Useful for scenarios where exact matches are critical, such as determining ground truth values. - public bool? Exhaustive { get; set; } - /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. - public double? Oversampling { get; set; } - /// Relative weight of the vector query when compared to other vector query and/or the text query within the same search request. This value is used when combining the results of multiple ranking lists produced by the different vector queries and/or the results retrieved through the text query. The higher the weight, the higher the documents that matched that query will be in the final ranking. Default is 1.0 and the value needs to be a positive number larger than zero. - public float? 
Weight { get; set; } - /// - /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public VectorThreshold Threshold { get; set; } - /// The OData filter expression to apply to this specific vector query. If no filter expression is defined at the vector level, the expression defined in the top level filter parameter is used instead. - public string FilterOverride { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearch.Serialization.cs deleted file mode 100644 index 82c5b739a265..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearch.Serialization.cs +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class VectorSearch : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsCollectionDefined(Profiles)) - { - writer.WritePropertyName("profiles"u8); - writer.WriteStartArray(); - foreach (var item in Profiles) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Algorithms)) - { - writer.WritePropertyName("algorithms"u8); - writer.WriteStartArray(); - foreach (var item in Algorithms) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Vectorizers)) - { - writer.WritePropertyName("vectorizers"u8); - writer.WriteStartArray(); - foreach (var item in Vectorizers) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - if (Optional.IsCollectionDefined(Compressions)) - { - writer.WritePropertyName("compressions"u8); - writer.WriteStartArray(); - foreach (var item in Compressions) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } - - internal static VectorSearch DeserializeVectorSearch(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IList profiles = default; - IList algorithms = default; - IList vectorizers = default; - IList compressions = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("profiles"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(VectorSearchProfile.DeserializeVectorSearchProfile(item)); - } - profiles = array; - continue; - } - if (property.NameEquals("algorithms"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(VectorSearchAlgorithmConfiguration.DeserializeVectorSearchAlgorithmConfiguration(item)); - } - algorithms = array; - continue; - } - if (property.NameEquals("vectorizers"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - 
continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(VectorSearchVectorizer.DeserializeVectorSearchVectorizer(item)); - } - vectorizers = array; - continue; - } - if (property.NameEquals("compressions"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(VectorSearchCompression.DeserializeVectorSearchCompression(item)); - } - compressions = array; - continue; - } - } - return new VectorSearch(profiles ?? new ChangeTrackingList(), algorithms ?? new ChangeTrackingList(), vectorizers ?? new ChangeTrackingList(), compressions ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static VectorSearch FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorSearch(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearch.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearch.cs deleted file mode 100644 index c54f09fa71c6..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearch.cs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Contains configuration options related to vector search. - public partial class VectorSearch - { - /// Initializes a new instance of . - public VectorSearch() - { - Profiles = new ChangeTrackingList(); - Algorithms = new ChangeTrackingList(); - Vectorizers = new ChangeTrackingList(); - Compressions = new ChangeTrackingList(); - } - - /// Initializes a new instance of . - /// Defines combinations of configurations to use with vector search. - /// - /// Contains configuration options specific to the algorithm used during indexing or querying. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// - /// Contains configuration options on how to vectorize text vector queries. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - /// - /// Contains configuration options specific to the compression method used during indexing or querying. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . 
- /// - internal VectorSearch(IList profiles, IList algorithms, IList vectorizers, IList compressions) - { - Profiles = profiles; - Algorithms = algorithms; - Vectorizers = vectorizers; - Compressions = compressions; - } - - /// Defines combinations of configurations to use with vector search. - public IList Profiles { get; } - /// - /// Contains configuration options specific to the algorithm used during indexing or querying. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public IList Algorithms { get; } - /// - /// Contains configuration options on how to vectorize text vector queries. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - public IList Vectorizers { get; } - /// - /// Contains configuration options specific to the compression method used during indexing or querying. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public IList Compressions { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmConfiguration.Serialization.cs deleted file mode 100644 index 3009dc29d84c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmConfiguration.Serialization.cs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class VectorSearchAlgorithmConfiguration : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static VectorSearchAlgorithmConfiguration DeserializeVectorSearchAlgorithmConfiguration(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("kind", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "exhaustiveKnn": return ExhaustiveKnnAlgorithmConfiguration.DeserializeExhaustiveKnnAlgorithmConfiguration(element); - case "hnsw": return HnswAlgorithmConfiguration.DeserializeHnswAlgorithmConfiguration(element); - } - } - return UnknownVectorSearchAlgorithmConfiguration.DeserializeUnknownVectorSearchAlgorithmConfiguration(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static VectorSearchAlgorithmConfiguration FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorSearchAlgorithmConfiguration(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmConfiguration.cs deleted file mode 100644 index 45e666a245a2..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmConfiguration.cs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Contains configuration options specific to the algorithm used during indexing or querying. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public abstract partial class VectorSearchAlgorithmConfiguration - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// is null. - protected VectorSearchAlgorithmConfiguration(string name) - { - Argument.AssertNotNull(name, nameof(name)); - - Name = name; - } - - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of algorithm being configured for use with vector search. - internal VectorSearchAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind) - { - Name = name; - Kind = kind; - } - - /// The name to associate with this particular configuration. - public string Name { get; set; } - /// The name of the kind of algorithm being configured for use with vector search. - internal VectorSearchAlgorithmKind Kind { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmMetric.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmMetric.cs deleted file mode 100644 index baf8c7e337df..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmMetric.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.ComponentModel; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// The similarity metric to use for vector comparisons. It is recommended to choose the same similarity metric as the embedding model was trained on. - public readonly partial struct VectorSearchAlgorithmMetric : IEquatable - { - private readonly string _value; - - /// Initializes a new instance of . - /// is null. - public VectorSearchAlgorithmMetric(string value) - { - _value = value ?? 
throw new ArgumentNullException(nameof(value)); - } - - private const string CosineValue = "cosine"; - private const string EuclideanValue = "euclidean"; - private const string DotProductValue = "dotProduct"; - private const string HammingValue = "hamming"; - - /// Measures the angle between vectors to quantify their similarity, disregarding magnitude. The smaller the angle, the closer the similarity. - public static VectorSearchAlgorithmMetric Cosine { get; } = new VectorSearchAlgorithmMetric(CosineValue); - /// Computes the straight-line distance between vectors in a multi-dimensional space. The smaller the distance, the closer the similarity. - public static VectorSearchAlgorithmMetric Euclidean { get; } = new VectorSearchAlgorithmMetric(EuclideanValue); - /// Calculates the sum of element-wise products to gauge alignment and magnitude similarity. The larger and more positive, the closer the similarity. - public static VectorSearchAlgorithmMetric DotProduct { get; } = new VectorSearchAlgorithmMetric(DotProductValue); - /// Only applicable to bit-packed binary data types. Determines dissimilarity by counting differing positions in binary vectors. The fewer differences, the closer the similarity. - public static VectorSearchAlgorithmMetric Hamming { get; } = new VectorSearchAlgorithmMetric(HammingValue); - /// Determines if two values are the same. - public static bool operator ==(VectorSearchAlgorithmMetric left, VectorSearchAlgorithmMetric right) => left.Equals(right); - /// Determines if two values are not the same. - public static bool operator !=(VectorSearchAlgorithmMetric left, VectorSearchAlgorithmMetric right) => !left.Equals(right); - /// Converts a to a . - public static implicit operator VectorSearchAlgorithmMetric(string value) => new VectorSearchAlgorithmMetric(value); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is VectorSearchAlgorithmMetric other && Equals(other); - /// - public bool Equals(VectorSearchAlgorithmMetric other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - /// - public override string ToString() => _value; - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.Serialization.cs deleted file mode 100644 index 303a8552a462..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.Serialization.cs +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
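As a point of reference for the `VectorSearchAlgorithmMetric` values deleted above (this snippet is not part of the PR; the configuration name is a placeholder), this is roughly how the metric is consumed when declaring an HNSW algorithm on an index:

```csharp
using Azure.Search.Documents.Indexes.Models;

// A minimal sketch; "my-hnsw" is an arbitrary configuration name. Picking the
// metric the embedding model was trained with (commonly cosine) follows the
// recommendation in the doc comment above.
var vectorSearch = new VectorSearch
{
    Algorithms =
    {
        new HnswAlgorithmConfiguration("my-hnsw")
        {
            Parameters = new HnswParameters
            {
                Metric = VectorSearchAlgorithmMetric.Cosine
            }
        }
    }
};
```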
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class VectorSearchCompression : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(CompressionName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(RerankWithOriginalVectors)) - { - writer.WritePropertyName("rerankWithOriginalVectors"u8); - writer.WriteBooleanValue(RerankWithOriginalVectors.Value); - } - if (Optional.IsDefined(DefaultOversampling)) - { - if (DefaultOversampling != null) - { - writer.WritePropertyName("defaultOversampling"u8); - writer.WriteNumberValue(DefaultOversampling.Value); - } - else - { - writer.WriteNull("defaultOversampling"); - } - } - if (Optional.IsDefined(RescoringOptions)) - { - if (RescoringOptions != null) - { - writer.WritePropertyName("rescoringOptions"u8); - writer.WriteObjectValue(RescoringOptions); - } - else - { - writer.WriteNull("rescoringOptions"); - } - } - if (Optional.IsDefined(TruncationDimension)) - { - if (TruncationDimension != null) - { - writer.WritePropertyName("truncationDimension"u8); - writer.WriteNumberValue(TruncationDimension.Value); - } - else - { - writer.WriteNull("truncationDimension"); - } - } - writer.WriteEndObject(); - } - - internal static VectorSearchCompression DeserializeVectorSearchCompression(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("kind", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "binaryQuantization": return BinaryQuantizationCompression.DeserializeBinaryQuantizationCompression(element); - case "scalarQuantization": return ScalarQuantizationCompression.DeserializeScalarQuantizationCompression(element); - } - } - return UnknownVectorSearchCompression.DeserializeUnknownVectorSearchCompression(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static VectorSearchCompression FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorSearchCompression(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.cs deleted file mode 100644 index d0b140a9f607..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Contains configuration options specific to the compression method used during indexing or querying. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. 
- /// The available derived classes include and . - /// - public abstract partial class VectorSearchCompression - { - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// is null. - protected VectorSearchCompression(string compressionName) - { - Argument.AssertNotNull(compressionName, nameof(compressionName)); - - CompressionName = compressionName; - } - - /// Initializes a new instance of . - /// The name to associate with this particular configuration. - /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. - /// Contains the options for rescoring. - /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. - internal VectorSearchCompression(string compressionName, VectorSearchCompressionKind kind, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? truncationDimension) - { - CompressionName = compressionName; - Kind = kind; - RerankWithOriginalVectors = rerankWithOriginalVectors; - DefaultOversampling = defaultOversampling; - RescoringOptions = rescoringOptions; - TruncationDimension = truncationDimension; - } - /// The name of the kind of compression method being configured for use with vector search. - internal VectorSearchCompressionKind Kind { get; set; } - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - public bool? RerankWithOriginalVectors { get; set; } - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. - public double? DefaultOversampling { get; set; } - /// Contains the options for rescoring. - public RescoringOptions RescoringOptions { get; set; } - /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. 
It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. - public int? TruncationDimension { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchProfile.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchProfile.Serialization.cs deleted file mode 100644 index 7d1ecb943e04..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchProfile.Serialization.cs +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class VectorSearchProfile : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WritePropertyName("algorithm"u8); - writer.WriteStringValue(AlgorithmConfigurationName); - if (Optional.IsDefined(VectorizerName)) - { - writer.WritePropertyName("vectorizer"u8); - writer.WriteStringValue(VectorizerName); - } - if (Optional.IsDefined(CompressionName)) - { - writer.WritePropertyName("compression"u8); - writer.WriteStringValue(CompressionName); - } - writer.WriteEndObject(); - } - - internal static VectorSearchProfile DeserializeVectorSearchProfile(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string name = default; - string algorithm = default; - string vectorizer = default; - string compression = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("algorithm"u8)) - { - algorithm = property.Value.GetString(); - continue; - } - if (property.NameEquals("vectorizer"u8)) - { - vectorizer = property.Value.GetString(); - continue; - } - if (property.NameEquals("compression"u8)) - { - compression = property.Value.GetString(); - continue; - } - } - return new VectorSearchProfile(name, algorithm, vectorizer, compression); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static VectorSearchProfile FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorSearchProfile(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchProfile.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchProfile.cs deleted file mode 100644 index fc545f7077ba..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchProfile.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Defines a combination of configurations to use with vector search. 
- public partial class VectorSearchProfile - { - /// Initializes a new instance of . - /// The name to associate with this particular vector search profile. - /// The name of the vector search algorithm configuration that specifies the algorithm and optional parameters. - /// or is null. - public VectorSearchProfile(string name, string algorithmConfigurationName) - { - Argument.AssertNotNull(name, nameof(name)); - Argument.AssertNotNull(algorithmConfigurationName, nameof(algorithmConfigurationName)); - - Name = name; - AlgorithmConfigurationName = algorithmConfigurationName; - } - - /// Initializes a new instance of . - /// The name to associate with this particular vector search profile. - /// The name of the vector search algorithm configuration that specifies the algorithm and optional parameters. - /// The name of the vectorization being configured for use with vector search. - /// The name of the compression method configuration that specifies the compression method and optional parameters. - internal VectorSearchProfile(string name, string algorithmConfigurationName, string vectorizerName, string compressionName) - { - Name = name; - AlgorithmConfigurationName = algorithmConfigurationName; - VectorizerName = vectorizerName; - CompressionName = compressionName; - } - - /// The name to associate with this particular vector search profile. - public string Name { get; set; } - /// The name of the vector search algorithm configuration that specifies the algorithm and optional parameters. - public string AlgorithmConfigurationName { get; set; } - /// The name of the vectorization being configured for use with vector search. - public string VectorizerName { get; set; } - /// The name of the compression method configuration that specifies the compression method and optional parameters. - public string CompressionName { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizer.Serialization.cs deleted file mode 100644 index ec0d340d8b41..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizer.Serialization.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
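For orientation only (not part of this change; all names are placeholders): a `VectorSearchProfile` is a named bundle that ties a field to an algorithm configuration and, optionally, to a vectorizer and a compression configuration defined elsewhere in the same `VectorSearch`:

```csharp
using Azure.Search.Documents.Indexes.Models;

// The referenced names must match configurations registered in
// VectorSearch.Algorithms, Vectorizers, and Compressions respectively.
var profile = new VectorSearchProfile("my-profile", "my-hnsw")
{
    VectorizerName = "my-vectorizer",     // optional; enables vectorizable queries
    CompressionName = "my-compression"    // optional; e.g. a scalar quantization config
};

var vectorSearch = new VectorSearch();
vectorSearch.Profiles.Add(profile);
```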
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class VectorSearchVectorizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(VectorizerName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static VectorSearchVectorizer DeserializeVectorSearchVectorizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("kind", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "aiServicesVision": return AIServicesVisionVectorizer.DeserializeAIServicesVisionVectorizer(element); - case "aml": return AzureMachineLearningVectorizer.DeserializeAzureMachineLearningVectorizer(element); - case "azureOpenAI": return AzureOpenAIVectorizer.DeserializeAzureOpenAIVectorizer(element); - case "customWebApi": return WebApiVectorizer.DeserializeWebApiVectorizer(element); - } - } - return UnknownVectorSearchVectorizer.DeserializeUnknownVectorSearchVectorizer(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static VectorSearchVectorizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorSearchVectorizer(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizer.cs deleted file mode 100644 index 06fbd0664c38..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizer.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// - /// Specifies the vectorization method to be used during query time. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , and . - /// - public abstract partial class VectorSearchVectorizer - { - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// is null. - protected VectorSearchVectorizer(string vectorizerName) - { - Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); - - VectorizerName = vectorizerName; - } - - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// The name of the kind of vectorization method being configured for use with vector search. 
- internal VectorSearchVectorizer(string vectorizerName, VectorSearchVectorizerKind kind) - { - VectorizerName = vectorizerName; - Kind = kind; - } - /// The name of the kind of vectorization method being configured for use with vector search. - internal VectorSearchVectorizerKind Kind { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSimilarityThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSimilarityThreshold.Serialization.cs deleted file mode 100644 index a9450d15337e..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSimilarityThreshold.Serialization.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorSimilarityThreshold : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("value"u8); - writer.WriteNumberValue(Value); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static VectorSimilarityThreshold DeserializeVectorSimilarityThreshold(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - double value = default; - VectorThresholdKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("value"u8)) - { - value = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorThresholdKind(property.Value.GetString()); - continue; - } - } - return new VectorSimilarityThreshold(kind, value); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new VectorSimilarityThreshold FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorSimilarityThreshold(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSimilarityThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSimilarityThreshold.cs deleted file mode 100644 index 404a47badb0a..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSimilarityThreshold.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The results of the vector query will be filtered based on the vector similarity metric. Note this is the canonical definition of similarity metric, not the 'distance' version. The threshold direction (larger or smaller) will be chosen automatically according to the metric used by the field. - public partial class VectorSimilarityThreshold : VectorThreshold - { - /// Initializes a new instance of . - /// The threshold will filter based on the similarity metric value. Note this is the canonical definition of similarity metric, not the 'distance' version. 
The threshold direction (larger or smaller) will be chosen automatically according to the metric used by the field. - public VectorSimilarityThreshold(double value) - { - Value = value; - Kind = VectorThresholdKind.VectorSimilarity; - } - - /// Initializes a new instance of . - /// The kind of threshold used to filter vector queries. - /// The threshold will filter based on the similarity metric value. Note this is the canonical definition of similarity metric, not the 'distance' version. The threshold direction (larger or smaller) will be chosen automatically according to the metric used by the field. - internal VectorSimilarityThreshold(VectorThresholdKind kind, double value) : base(kind) - { - Value = value; - Kind = kind; - } - - /// The threshold will filter based on the similarity metric value. Note this is the canonical definition of similarity metric, not the 'distance' version. The threshold direction (larger or smaller) will be chosen automatically according to the metric used by the field. - public double Value { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThreshold.Serialization.cs deleted file mode 100644 index d83bad7f20ca..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThreshold.Serialization.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorThreshold : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static VectorThreshold DeserializeVectorThreshold(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - if (element.TryGetProperty("kind", out JsonElement discriminator)) - { - switch (discriminator.GetString()) - { - case "searchScore": return SearchScoreThreshold.DeserializeSearchScoreThreshold(element); - case "vectorSimilarity": return VectorSimilarityThreshold.DeserializeVectorSimilarityThreshold(element); - } - } - return UnknownVectorThreshold.DeserializeUnknownVectorThreshold(element); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static VectorThreshold FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorThreshold(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThreshold.cs deleted file mode 100644 index d02a782f340c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThreshold.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
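To illustrate how the threshold types above are meant to be used (a sketch under assumptions, not code from this PR; the field name and embedding values are placeholders):

```csharp
using Azure.Search.Documents.Models;

// Filters out matches whose similarity score is worse than 0.8; the comparison
// direction is chosen automatically from the field's similarity metric.
var vectorQuery = new VectorizedQuery(new float[] { 0.1f, 0.2f, 0.3f })
{
    KNearestNeighborsCount = 5,
    Fields = { "contentVector" },   // hypothetical Collection(Edm.Single) field
    Threshold = new VectorSimilarityThreshold(0.8)
};
```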
- -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// - /// The threshold used for vector queries. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public abstract partial class VectorThreshold - { - /// Initializes a new instance of . - protected VectorThreshold() - { - } - - /// Initializes a new instance of . - /// The kind of threshold used to filter vector queries. - internal VectorThreshold(VectorThresholdKind kind) - { - Kind = kind; - } - - /// The kind of threshold used to filter vector queries. - internal VectorThresholdKind Kind { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageBinaryQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageBinaryQuery.Serialization.cs deleted file mode 100644 index f33b607a7b13..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageBinaryQuery.Serialization.cs +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorizableImageBinaryQuery : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Base64Image)) - { - writer.WritePropertyName("base64Image"u8); - writer.WriteStringValue(Base64Image); - } - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(KNearestNeighborsCount)) - { - writer.WritePropertyName("k"u8); - writer.WriteNumberValue(KNearestNeighborsCount.Value); - } - if (Optional.IsDefined(FieldsRaw)) - { - writer.WritePropertyName("fields"u8); - writer.WriteStringValue(FieldsRaw); - } - if (Optional.IsDefined(Exhaustive)) - { - writer.WritePropertyName("exhaustive"u8); - writer.WriteBooleanValue(Exhaustive.Value); - } - if (Optional.IsDefined(Oversampling)) - { - writer.WritePropertyName("oversampling"u8); - writer.WriteNumberValue(Oversampling.Value); - } - if (Optional.IsDefined(Weight)) - { - writer.WritePropertyName("weight"u8); - writer.WriteNumberValue(Weight.Value); - } - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteObjectValue(Threshold); - } - if (Optional.IsDefined(FilterOverride)) - { - writer.WritePropertyName("filterOverride"u8); - writer.WriteStringValue(FilterOverride); - } - writer.WriteEndObject(); - } - - internal static VectorizableImageBinaryQuery DeserializeVectorizableImageBinaryQuery(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string base64Image = default; - VectorQueryKind kind = default; - int? k = default; - string fields = default; - bool? exhaustive = default; - double? oversampling = default; - float? 
weight = default; - VectorThreshold threshold = default; - string filterOverride = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("base64Image"u8)) - { - base64Image = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorQueryKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("k"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - k = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("fields"u8)) - { - fields = property.Value.GetString(); - continue; - } - if (property.NameEquals("exhaustive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - exhaustive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("oversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - oversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("weight"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - weight = property.Value.GetSingle(); - continue; - } - if (property.NameEquals("threshold"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - threshold = VectorThreshold.DeserializeVectorThreshold(property.Value); - continue; - } - if (property.NameEquals("filterOverride"u8)) - { - filterOverride = property.Value.GetString(); - continue; - } - } - return new VectorizableImageBinaryQuery( - kind, - k, - fields, - exhaustive, - oversampling, - weight, - threshold, - filterOverride, - base64Image); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new VectorizableImageBinaryQuery FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorizableImageBinaryQuery(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageBinaryQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageBinaryQuery.cs deleted file mode 100644 index 2bd927d67b15..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageBinaryQuery.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The query parameters to use for vector search when a base 64 encoded binary of an image that needs to be vectorized is provided. - public partial class VectorizableImageBinaryQuery : VectorQuery - { - /// Initializes a new instance of . - public VectorizableImageBinaryQuery() - { - Kind = VectorQueryKind.ImageBinary; - } - - /// Initializes a new instance of . - /// The kind of vector query being performed. - /// Number of nearest neighbors to return as top hits. - /// Vector Fields of type Collection(Edm.Single) to be included in the vector searched. - /// When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. Useful for scenarios where exact matches are critical, such as determining ground truth values. 
- /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. - /// Relative weight of the vector query when compared to other vector query and/or the text query within the same search request. This value is used when combining the results of multiple ranking lists produced by the different vector queries and/or the results retrieved through the text query. The higher the weight, the higher the documents that matched that query will be in the final ranking. Default is 1.0 and the value needs to be a positive number larger than zero. - /// - /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The OData filter expression to apply to this specific vector query. If no filter expression is defined at the vector level, the expression defined in the top level filter parameter is used instead. - /// The base 64 encoded binary of an image to be vectorized to perform a vector search query. - internal VectorizableImageBinaryQuery(VectorQueryKind kind, int? kNearestNeighborsCount, string fieldsRaw, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, string base64Image) : base(kind, kNearestNeighborsCount, fieldsRaw, exhaustive, oversampling, weight, threshold, filterOverride) - { - Base64Image = base64Image; - Kind = kind; - } - - /// The base 64 encoded binary of an image to be vectorized to perform a vector search query. - public string Base64Image { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageUrlQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageUrlQuery.Serialization.cs deleted file mode 100644 index d950d85484b8..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageUrlQuery.Serialization.cs +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
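A hedged usage sketch for the binary-image query deleted above (not from this PR; the file path and field name are placeholders, and the target field's profile must reference a vectorizer that accepts images):

```csharp
using System;
using System.IO;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

byte[] imageBytes = File.ReadAllBytes("photo.jpg");   // placeholder image

var options = new SearchOptions
{
    VectorSearch = new VectorSearchOptions
    {
        Queries =
        {
            new VectorizableImageBinaryQuery
            {
                Base64Image = Convert.ToBase64String(imageBytes),
                KNearestNeighborsCount = 3,
                Fields = { "imageVector" }            // hypothetical vector field
            }
        }
    }
};
```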
- -// - -#nullable disable - -using System; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorizableImageUrlQuery : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Url)) - { - writer.WritePropertyName("url"u8); - writer.WriteStringValue(Url.AbsoluteUri); - } - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(KNearestNeighborsCount)) - { - writer.WritePropertyName("k"u8); - writer.WriteNumberValue(KNearestNeighborsCount.Value); - } - if (Optional.IsDefined(FieldsRaw)) - { - writer.WritePropertyName("fields"u8); - writer.WriteStringValue(FieldsRaw); - } - if (Optional.IsDefined(Exhaustive)) - { - writer.WritePropertyName("exhaustive"u8); - writer.WriteBooleanValue(Exhaustive.Value); - } - if (Optional.IsDefined(Oversampling)) - { - writer.WritePropertyName("oversampling"u8); - writer.WriteNumberValue(Oversampling.Value); - } - if (Optional.IsDefined(Weight)) - { - writer.WritePropertyName("weight"u8); - writer.WriteNumberValue(Weight.Value); - } - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteObjectValue(Threshold); - } - if (Optional.IsDefined(FilterOverride)) - { - writer.WritePropertyName("filterOverride"u8); - writer.WriteStringValue(FilterOverride); - } - writer.WriteEndObject(); - } - - internal static VectorizableImageUrlQuery DeserializeVectorizableImageUrlQuery(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - Uri url = default; - VectorQueryKind kind = default; - int? k = default; - string fields = default; - bool? exhaustive = default; - double? oversampling = default; - float? 
weight = default; - VectorThreshold threshold = default; - string filterOverride = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("url"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - url = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorQueryKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("k"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - k = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("fields"u8)) - { - fields = property.Value.GetString(); - continue; - } - if (property.NameEquals("exhaustive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - exhaustive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("oversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - oversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("weight"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - weight = property.Value.GetSingle(); - continue; - } - if (property.NameEquals("threshold"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - threshold = VectorThreshold.DeserializeVectorThreshold(property.Value); - continue; - } - if (property.NameEquals("filterOverride"u8)) - { - filterOverride = property.Value.GetString(); - continue; - } - } - return new VectorizableImageUrlQuery( - kind, - k, - fields, - exhaustive, - oversampling, - weight, - threshold, - filterOverride, - url); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new VectorizableImageUrlQuery FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorizableImageUrlQuery(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageUrlQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageUrlQuery.cs deleted file mode 100644 index 38d5b710e388..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableImageUrlQuery.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Models -{ - /// The query parameters to use for vector search when an url that represents an image value that needs to be vectorized is provided. - public partial class VectorizableImageUrlQuery : VectorQuery - { - /// Initializes a new instance of . - public VectorizableImageUrlQuery() - { - Kind = VectorQueryKind.ImageUrl; - } - - /// Initializes a new instance of . - /// The kind of vector query being performed. - /// Number of nearest neighbors to return as top hits. - /// Vector Fields of type Collection(Edm.Single) to be included in the vector searched. - /// When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. 
Useful for scenarios where exact matches are critical, such as determining ground truth values. - /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. - /// Relative weight of the vector query when compared to other vector query and/or the text query within the same search request. This value is used when combining the results of multiple ranking lists produced by the different vector queries and/or the results retrieved through the text query. The higher the weight, the higher the documents that matched that query will be in the final ranking. Default is 1.0 and the value needs to be a positive number larger than zero. - /// - /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The OData filter expression to apply to this specific vector query. If no filter expression is defined at the vector level, the expression defined in the top level filter parameter is used instead. - /// The URL of an image to be vectorized to perform a vector search query. - internal VectorizableImageUrlQuery(VectorQueryKind kind, int? kNearestNeighborsCount, string fieldsRaw, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, Uri url) : base(kind, kNearestNeighborsCount, fieldsRaw, exhaustive, oversampling, weight, threshold, filterOverride) - { - Url = url; - Kind = kind; - } - - /// The URL of an image to be vectorized to perform a vector search query. - public Uri Url { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableTextQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableTextQuery.Serialization.cs deleted file mode 100644 index 4143ea131796..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableTextQuery.Serialization.cs +++ /dev/null @@ -1,187 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
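[Reviewer note] The VectorizableImageUrlQuery model deleted above is the URL-based counterpart of the binary image query. A hedged sketch of populating one, using a placeholder image URL and a hypothetical vector field name:

using System;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

var urlQuery = new VectorizableImageUrlQuery
{
    Url = new Uri("https://example.com/images/sample.jpg"), // placeholder image URL
    KNearestNeighborsCount = 5,
    Fields = { "imageVector" },                             // hypothetical vector field
};

var options = new SearchOptions
{
    VectorSearch = new() { Queries = { urlQuery } },
};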
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorizableTextQuery : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("text"u8); - writer.WriteStringValue(Text); - if (Optional.IsDefined(QueryRewrites)) - { - writer.WritePropertyName("queryRewrites"u8); - writer.WriteStringValue(QueryRewrites.Value.ToString()); - } - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(KNearestNeighborsCount)) - { - writer.WritePropertyName("k"u8); - writer.WriteNumberValue(KNearestNeighborsCount.Value); - } - if (Optional.IsDefined(FieldsRaw)) - { - writer.WritePropertyName("fields"u8); - writer.WriteStringValue(FieldsRaw); - } - if (Optional.IsDefined(Exhaustive)) - { - writer.WritePropertyName("exhaustive"u8); - writer.WriteBooleanValue(Exhaustive.Value); - } - if (Optional.IsDefined(Oversampling)) - { - writer.WritePropertyName("oversampling"u8); - writer.WriteNumberValue(Oversampling.Value); - } - if (Optional.IsDefined(Weight)) - { - writer.WritePropertyName("weight"u8); - writer.WriteNumberValue(Weight.Value); - } - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteObjectValue(Threshold); - } - if (Optional.IsDefined(FilterOverride)) - { - writer.WritePropertyName("filterOverride"u8); - writer.WriteStringValue(FilterOverride); - } - writer.WriteEndObject(); - } - - internal static VectorizableTextQuery DeserializeVectorizableTextQuery(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string text = default; - QueryRewritesType? queryRewrites = default; - VectorQueryKind kind = default; - int? k = default; - string fields = default; - bool? exhaustive = default; - double? oversampling = default; - float? 
weight = default; - VectorThreshold threshold = default; - string filterOverride = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("text"u8)) - { - text = property.Value.GetString(); - continue; - } - if (property.NameEquals("queryRewrites"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - queryRewrites = new QueryRewritesType(property.Value.GetString()); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorQueryKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("k"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - k = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("fields"u8)) - { - fields = property.Value.GetString(); - continue; - } - if (property.NameEquals("exhaustive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - exhaustive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("oversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - oversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("weight"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - weight = property.Value.GetSingle(); - continue; - } - if (property.NameEquals("threshold"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - threshold = VectorThreshold.DeserializeVectorThreshold(property.Value); - continue; - } - if (property.NameEquals("filterOverride"u8)) - { - filterOverride = property.Value.GetString(); - continue; - } - } - return new VectorizableTextQuery( - kind, - k, - fields, - exhaustive, - oversampling, - weight, - threshold, - filterOverride, - text, - queryRewrites); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new VectorizableTextQuery FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorizableTextQuery(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableTextQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableTextQuery.cs deleted file mode 100644 index c72e86c1ceb3..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizableTextQuery.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Models -{ - /// The query parameters to use for vector search when a text value that needs to be vectorized is provided. - public partial class VectorizableTextQuery : VectorQuery - { - /// Initializes a new instance of . - /// The text to be vectorized to perform a vector search query. - /// is null. - public VectorizableTextQuery(string text) - { - Argument.AssertNotNull(text, nameof(text)); - - Text = text; - Kind = VectorQueryKind.Text; - } - - /// Initializes a new instance of . - /// The kind of vector query being performed. - /// Number of nearest neighbors to return as top hits. 
- /// Vector Fields of type Collection(Edm.Single) to be included in the vector searched. - /// When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. Useful for scenarios where exact matches are critical, such as determining ground truth values. - /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. - /// Relative weight of the vector query when compared to other vector query and/or the text query within the same search request. This value is used when combining the results of multiple ranking lists produced by the different vector queries and/or the results retrieved through the text query. The higher the weight, the higher the documents that matched that query will be in the final ranking. Default is 1.0 and the value needs to be a positive number larger than zero. - /// - /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The OData filter expression to apply to this specific vector query. If no filter expression is defined at the vector level, the expression defined in the top level filter parameter is used instead. - /// The text to be vectorized to perform a vector search query. - /// Can be configured to let a generative model rewrite the query before sending it to be vectorized. - internal VectorizableTextQuery(VectorQueryKind kind, int? kNearestNeighborsCount, string fieldsRaw, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, string text, QueryRewritesType? queryRewrites) : base(kind, kNearestNeighborsCount, fieldsRaw, exhaustive, oversampling, weight, threshold, filterOverride) - { - Text = text; - QueryRewrites = queryRewrites; - Kind = kind; - } - /// Can be configured to let a generative model rewrite the query before sending it to be vectorized. - public QueryRewritesType? QueryRewrites { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizedQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizedQuery.Serialization.cs deleted file mode 100644 index 0bd9a870d967..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizedQuery.Serialization.cs +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
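[Reviewer note] The VectorizableTextQuery removed above is the query type that lets the service vectorize free text on the caller's behalf. A minimal sketch of issuing one through SearchClient; the endpoint, index name, key, query text, and field names are placeholders:

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

var searchClient = new SearchClient(
    new Uri("https://<service-name>.search.windows.net"),
    "hotels-index",                      // placeholder index
    new AzureKeyCredential("<api-key>"));

var options = new SearchOptions
{
    VectorSearch = new()
    {
        Queries =
        {
            new VectorizableTextQuery("historic hotel with water view")
            {
                KNearestNeighborsCount = 3,
                Fields = { "descriptionVector" }, // hypothetical vector field
                Weight = 2.0f,
            }
        }
    }
};

SearchResults<SearchDocument> results = searchClient.Search<SearchDocument>("*", options);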
- -// - -#nullable disable - -using System; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorizedQuery : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("vector"u8); - writer.WriteStartArray(); - foreach (var item in Vector.Span) - { - writer.WriteNumberValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - if (Optional.IsDefined(KNearestNeighborsCount)) - { - writer.WritePropertyName("k"u8); - writer.WriteNumberValue(KNearestNeighborsCount.Value); - } - if (Optional.IsDefined(FieldsRaw)) - { - writer.WritePropertyName("fields"u8); - writer.WriteStringValue(FieldsRaw); - } - if (Optional.IsDefined(Exhaustive)) - { - writer.WritePropertyName("exhaustive"u8); - writer.WriteBooleanValue(Exhaustive.Value); - } - if (Optional.IsDefined(Oversampling)) - { - writer.WritePropertyName("oversampling"u8); - writer.WriteNumberValue(Oversampling.Value); - } - if (Optional.IsDefined(Weight)) - { - writer.WritePropertyName("weight"u8); - writer.WriteNumberValue(Weight.Value); - } - if (Optional.IsDefined(Threshold)) - { - writer.WritePropertyName("threshold"u8); - writer.WriteObjectValue(Threshold); - } - if (Optional.IsDefined(FilterOverride)) - { - writer.WritePropertyName("filterOverride"u8); - writer.WriteStringValue(FilterOverride); - } - writer.WriteEndObject(); - } - - internal static VectorizedQuery DeserializeVectorizedQuery(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - ReadOnlyMemory vector = default; - VectorQueryKind kind = default; - int? k = default; - string fields = default; - bool? exhaustive = default; - double? oversampling = default; - float? 
weight = default; - VectorThreshold threshold = default; - string filterOverride = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("vector"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - int index = 0; - float[] array = new float[property.Value.GetArrayLength()]; - foreach (var item in property.Value.EnumerateArray()) - { - array[index] = item.GetSingle(); - index++; - } - vector = new ReadOnlyMemory(array); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorQueryKind(property.Value.GetString()); - continue; - } - if (property.NameEquals("k"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - k = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("fields"u8)) - { - fields = property.Value.GetString(); - continue; - } - if (property.NameEquals("exhaustive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - exhaustive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("oversampling"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - oversampling = property.Value.GetDouble(); - continue; - } - if (property.NameEquals("weight"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - weight = property.Value.GetSingle(); - continue; - } - if (property.NameEquals("threshold"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - threshold = VectorThreshold.DeserializeVectorThreshold(property.Value); - continue; - } - if (property.NameEquals("filterOverride"u8)) - { - filterOverride = property.Value.GetString(); - continue; - } - } - return new VectorizedQuery( - kind, - k, - fields, - exhaustive, - oversampling, - weight, - threshold, - filterOverride, - vector); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new VectorizedQuery FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorizedQuery(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizedQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizedQuery.cs deleted file mode 100644 index 60236c73dfd9..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorizedQuery.cs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Models -{ - /// The query parameters to use for vector search when a raw vector value is provided. - public partial class VectorizedQuery : VectorQuery - { - /// Initializes a new instance of . - /// The vector representation of a search query. - public VectorizedQuery(ReadOnlyMemory vector) - { - Vector = vector; - Kind = VectorQueryKind.Vector; - } - - /// Initializes a new instance of . - /// The kind of vector query being performed. - /// Number of nearest neighbors to return as top hits. - /// Vector Fields of type Collection(Edm.Single) to be included in the vector searched. 
- /// When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. Useful for scenarios where exact matches are critical, such as determining ground truth values. - /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. - /// Relative weight of the vector query when compared to other vector query and/or the text query within the same search request. This value is used when combining the results of multiple ranking lists produced by the different vector queries and/or the results retrieved through the text query. The higher the weight, the higher the documents that matched that query will be in the final ranking. Default is 1.0 and the value needs to be a positive number larger than zero. - /// - /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - /// The OData filter expression to apply to this specific vector query. If no filter expression is defined at the vector level, the expression defined in the top level filter parameter is used instead. - /// The vector representation of a search query. - internal VectorizedQuery(VectorQueryKind kind, int? kNearestNeighborsCount, string fieldsRaw, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, ReadOnlyMemory vector) : base(kind, kNearestNeighborsCount, fieldsRaw, exhaustive, oversampling, weight, threshold, filterOverride) - { - Vector = vector; - Kind = kind; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorsDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorsDebugInfo.Serialization.cs deleted file mode 100644 index 555d305062bf..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorsDebugInfo.Serialization.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System.Text.Json; - -namespace Azure.Search.Documents.Models -{ - public partial class VectorsDebugInfo - { - internal static VectorsDebugInfo DeserializeVectorsDebugInfo(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - QueryResultDocumentSubscores subscores = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("subscores"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - subscores = QueryResultDocumentSubscores.DeserializeQueryResultDocumentSubscores(property.Value); - continue; - } - } - return new VectorsDebugInfo(subscores); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
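[Reviewer note] The VectorizedQuery removed above covers the case where the caller already holds an embedding. A sketch assuming the embedding was produced elsewhere (GetEmbedding is a hypothetical helper) and matches the dimensionality of the target field:

using System;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

ReadOnlyMemory<float> embedding = GetEmbedding("historic hotel with water view"); // hypothetical helper

var options = new SearchOptions
{
    VectorSearch = new()
    {
        Queries =
        {
            new VectorizedQuery(embedding)
            {
                KNearestNeighborsCount = 3,
                Fields = { "descriptionVector" }, // hypothetical vector field
                Exhaustive = false,               // true forces exhaustive KNN instead of ANN
            }
        }
    }
};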
- internal static VectorsDebugInfo FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVectorsDebugInfo(document.RootElement); - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorsDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorsDebugInfo.cs deleted file mode 100644 index c0e0cf118564..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorsDebugInfo.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -namespace Azure.Search.Documents.Models -{ - /// The VectorsDebugInfo. - public partial class VectorsDebugInfo - { - /// Initializes a new instance of . - internal VectorsDebugInfo() - { - } - - /// Initializes a new instance of . - /// The breakdown of subscores of the document prior to the chosen result set fusion/combination method such as RRF. - internal VectorsDebugInfo(QueryResultDocumentSubscores subscores) - { - Subscores = subscores; - } - - /// The breakdown of subscores of the document prior to the chosen result set fusion/combination method such as RRF. - public QueryResultDocumentSubscores Subscores { get; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VisionVectorizeSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VisionVectorizeSkill.Serialization.cs deleted file mode 100644 index 7ed64dec432f..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VisionVectorizeSkill.Serialization.cs +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
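[Reviewer note] VectorsDebugInfo, deleted just above, is a read-only model that only exposes the pre-fusion subscore breakdown. A minimal sketch of consuming it, assuming the caller already obtained an instance from a search result's debug output (that plumbing is not shown in this diff):

using System;
using Azure.Search.Documents.Models;

static void InspectVectorDebug(VectorsDebugInfo debugInfo)
{
    // Subscores holds the per-query scores prior to result fusion (e.g., RRF); it may be null.
    QueryResultDocumentSubscores subscores = debugInfo?.Subscores;
    Console.WriteLine(subscores is null
        ? "No vector subscore breakdown returned."
        : "Vector subscore breakdown is available.");
}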
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class VisionVectorizeSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (ModelVersion != null) - { - writer.WritePropertyName("modelVersion"u8); - writer.WriteStringValue(ModelVersion); - } - else - { - writer.WriteNull("modelVersion"); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static VisionVectorizeSkill DeserializeVisionVectorizeSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string modelVersion = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("modelVersion"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - modelVersion = null; - continue; - } - modelVersion = property.Value.GetString(); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new VisionVectorizeSkill( - odataType, - name, - description, - context, - inputs, - outputs, - modelVersion); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new VisionVectorizeSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeVisionVectorizeSkill(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VisionVectorizeSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VisionVectorizeSkill.cs deleted file mode 100644 index 18d17e3a7e81..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VisionVectorizeSkill.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Allows you to generate a vector embedding for a given image or text input using the Azure AI Services Vision Vectorize API. - public partial class VisionVectorizeSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The version of the model to use when calling the AI Services Vision service. It will default to the latest available when not specified. - /// or is null. - public VisionVectorizeSkill(IEnumerable inputs, IEnumerable outputs, string modelVersion) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - - ModelVersion = modelVersion; - ODataType = "#Microsoft.Skills.Vision.VectorizeSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The version of the model to use when calling the AI Services Vision service. It will default to the latest available when not specified. - internal VisionVectorizeSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, string modelVersion) : base(oDataType, name, description, context, inputs, outputs) - { - ModelVersion = modelVersion; - ODataType = oDataType ?? "#Microsoft.Skills.Vision.VectorizeSkill"; - } - - /// The version of the model to use when calling the AI Services Vision service. It will default to the latest available when not specified. 
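[Reviewer note] The VisionVectorizeSkill deleted above wraps the AI Services Vision vectorize API. A hedged sketch of constructing one for a skillset; the input/output paths, skill name, and model version are illustrative only:

using Azure.Search.Documents.Indexes.Models;

var visionSkill = new VisionVectorizeSkill(
    inputs: new[]
    {
        new InputFieldMappingEntry("image") { Source = "/document/normalized_images/*" },
    },
    outputs: new[]
    {
        new OutputFieldMappingEntry("vector") { TargetName = "imageVector" },
    },
    modelVersion: "2023-04-15") // illustrative; the service default is used when unspecified
{
    Name = "image-embedding-skill",
    Context = "/document/normalized_images/*",
};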
- public string ModelVersion { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiSkill.Serialization.cs deleted file mode 100644 index 5c0503004795..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiSkill.Serialization.cs +++ /dev/null @@ -1,310 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class WebApiSkill : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - writer.WritePropertyName("uri"u8); - writer.WriteStringValue(Uri); - if (Optional.IsCollectionDefined(HttpHeaders)) - { - if (HttpHeaders != null) - { - writer.WritePropertyName("httpHeaders"u8); - writer.WriteStartObject(); - foreach (var item in HttpHeaders) - { - writer.WritePropertyName(item.Key); - writer.WriteStringValue(item.Value); - } - writer.WriteEndObject(); - } - else - { - writer.WriteNull("httpHeaders"); - } - } - if (Optional.IsDefined(HttpMethod)) - { - writer.WritePropertyName("httpMethod"u8); - writer.WriteStringValue(HttpMethod); - } - if (Optional.IsDefined(Timeout)) - { - if (Timeout != null) - { - writer.WritePropertyName("timeout"u8); - writer.WriteStringValue(Timeout.Value, "P"); - } - else - { - writer.WriteNull("timeout"); - } - } - if (Optional.IsDefined(BatchSize)) - { - if (BatchSize != null) - { - writer.WritePropertyName("batchSize"u8); - writer.WriteNumberValue(BatchSize.Value); - } - else - { - writer.WriteNull("batchSize"); - } - } - if (Optional.IsDefined(DegreeOfParallelism)) - { - if (DegreeOfParallelism != null) - { - writer.WritePropertyName("degreeOfParallelism"u8); - writer.WriteNumberValue(DegreeOfParallelism.Value); - } - else - { - writer.WriteNull("degreeOfParallelism"); - } - } - if (Optional.IsDefined(AuthResourceId)) - { - if (AuthResourceId != null) - { - writer.WritePropertyName("authResourceId"u8); - writer.WriteStringValue(AuthResourceId); - } - else - { - writer.WriteNull("authResourceId"); - } - } - if (Optional.IsDefined(AuthIdentity)) - { - if (AuthIdentity != null) - { - writer.WritePropertyName("authIdentity"u8); - writer.WriteObjectValue(AuthIdentity); - } - else - { - writer.WriteNull("authIdentity"); - } - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - if (Optional.IsDefined(Name)) - { - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - } - if (Optional.IsDefined(Description)) - { - writer.WritePropertyName("description"u8); - writer.WriteStringValue(Description); - } - if (Optional.IsDefined(Context)) - { - writer.WritePropertyName("context"u8); - writer.WriteStringValue(Context); - } - writer.WritePropertyName("inputs"u8); - writer.WriteStartArray(); - foreach (var item in Inputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WritePropertyName("outputs"u8); - writer.WriteStartArray(); - foreach (var item in Outputs) - { - writer.WriteObjectValue(item); - } - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - internal static WebApiSkill DeserializeWebApiSkill(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - string uri = default; - IDictionary httpHeaders = 
default; - string httpMethod = default; - TimeSpan? timeout = default; - int? batchSize = default; - int? degreeOfParallelism = default; - ResourceIdentifier authResourceId = default; - SearchIndexerDataIdentity authIdentity = default; - string odataType = default; - string name = default; - string description = default; - string context = default; - IList inputs = default; - IList outputs = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("uri"u8)) - { - uri = property.Value.GetString(); - continue; - } - if (property.NameEquals("httpHeaders"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - httpHeaders = null; - continue; - } - Dictionary dictionary = new Dictionary(); - foreach (var property0 in property.Value.EnumerateObject()) - { - dictionary.Add(property0.Name, property0.Value.GetString()); - } - httpHeaders = dictionary; - continue; - } - if (property.NameEquals("httpMethod"u8)) - { - httpMethod = property.Value.GetString(); - continue; - } - if (property.NameEquals("timeout"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - timeout = null; - continue; - } - timeout = property.Value.GetTimeSpan("P"); - continue; - } - if (property.NameEquals("batchSize"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - batchSize = null; - continue; - } - batchSize = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("degreeOfParallelism"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - degreeOfParallelism = null; - continue; - } - degreeOfParallelism = property.Value.GetInt32(); - continue; - } - if (property.NameEquals("authResourceId"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - authResourceId = null; - continue; - } - authResourceId = new ResourceIdentifier(property.Value.GetString()); - continue; - } - if (property.NameEquals("authIdentity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - authIdentity = null; - continue; - } - authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("description"u8)) - { - description = property.Value.GetString(); - continue; - } - if (property.NameEquals("context"u8)) - { - context = property.Value.GetString(); - continue; - } - if (property.NameEquals("inputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item)); - } - inputs = array; - continue; - } - if (property.NameEquals("outputs"u8)) - { - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item)); - } - outputs = array; - continue; - } - } - return new WebApiSkill( - odataType, - name, - description, - context, - inputs, - outputs, - uri, - httpHeaders ?? new ChangeTrackingDictionary(), - httpMethod, - timeout, - batchSize, - degreeOfParallelism, - authResourceId, - authIdentity); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. 
- internal static new WebApiSkill FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeWebApiSkill(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiSkill.cs deleted file mode 100644 index 61734865b71d..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiSkill.cs +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// A skill that can call a Web API endpoint, allowing you to extend a skillset by having it call your custom code. - public partial class WebApiSkill : SearchIndexerSkill - { - /// Initializes a new instance of . - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The url for the Web API. - /// , or is null. - public WebApiSkill(IEnumerable inputs, IEnumerable outputs, string uri) : base(inputs, outputs) - { - Argument.AssertNotNull(inputs, nameof(inputs)); - Argument.AssertNotNull(outputs, nameof(outputs)); - Argument.AssertNotNull(uri, nameof(uri)); - - Uri = uri; - HttpHeaders = new ChangeTrackingDictionary(); - ODataType = "#Microsoft.Skills.Custom.WebApiSkill"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// The url for the Web API. - /// The headers required to make the http request. - /// The method for the http request. - /// The desired timeout for the request. Default is 30 seconds. - /// The desired batch size which indicates number of documents. - /// If set, the number of parallel calls that can be made to the Web API. - /// Applies to custom skills that connect to external code in an Azure function or some other application that provides the transformations. This value should be the application ID created for the function or app when it was registered with Azure Active Directory. When specified, the custom skill connects to the function or app using a managed ID (either system or user-assigned) of the search service and the access token of the function or app, using this value as the resource id for creating the scope of the access token. 
- /// - /// The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - internal WebApiSkill(string oDataType, string name, string description, string context, IList inputs, IList outputs, string uri, IDictionary httpHeaders, string httpMethod, TimeSpan? timeout, int? batchSize, int? degreeOfParallelism, ResourceIdentifier authResourceId, SearchIndexerDataIdentity authIdentity) : base(oDataType, name, description, context, inputs, outputs) - { - Uri = uri; - HttpHeaders = httpHeaders; - HttpMethod = httpMethod; - Timeout = timeout; - BatchSize = batchSize; - DegreeOfParallelism = degreeOfParallelism; - AuthResourceId = authResourceId; - AuthIdentity = authIdentity; - ODataType = oDataType ?? "#Microsoft.Skills.Custom.WebApiSkill"; - } - /// The method for the http request. - public string HttpMethod { get; set; } - /// The desired timeout for the request. Default is 30 seconds. - public TimeSpan? Timeout { get; set; } - /// The desired batch size which indicates number of documents. - public int? BatchSize { get; set; } - /// If set, the number of parallel calls that can be made to the Web API. - public int? DegreeOfParallelism { get; set; } - /// Applies to custom skills that connect to external code in an Azure function or some other application that provides the transformations. This value should be the application ID created for the function or app when it was registered with Azure Active Directory. When specified, the custom skill connects to the function or app using a managed ID (either system or user-assigned) of the search service and the access token of the function or app, using this value as the resource id for creating the scope of the access token. - public ResourceIdentifier AuthResourceId { get; set; } - /// - /// The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity AuthIdentity { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizer.Serialization.cs deleted file mode 100644 index e0c56920b66c..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizer.Serialization.cs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
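[Reviewer note] WebApiSkill, removed above, is the extension point for calling custom code from a skillset. A minimal sketch of wiring one up; the function URL, field names, and skillset name are placeholders, and the AuthResourceId/AuthIdentity pair documented above is left unset here:

using System;
using Azure.Search.Documents.Indexes.Models;

var customSkill = new WebApiSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("vector") { TargetName = "contentVector" } },
    uri: "https://my-function.azurewebsites.net/api/vectorize") // placeholder endpoint
{
    HttpMethod = "POST",
    BatchSize = 10,
    DegreeOfParallelism = 2,
    Timeout = TimeSpan.FromSeconds(90),
};

var skillset = new SearchIndexerSkillset("custom-vectorize-skillset", new SearchIndexerSkill[] { customSkill });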
- -// - -#nullable disable - -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class WebApiVectorizer : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Parameters)) - { - writer.WritePropertyName("customWebApiParameters"u8); - writer.WriteObjectValue(Parameters); - } - writer.WritePropertyName("name"u8); - writer.WriteStringValue(VectorizerName); - writer.WritePropertyName("kind"u8); - writer.WriteStringValue(Kind.ToString()); - writer.WriteEndObject(); - } - - internal static WebApiVectorizer DeserializeWebApiVectorizer(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - WebApiVectorizerParameters customWebApiParameters = default; - string name = default; - VectorSearchVectorizerKind kind = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("customWebApiParameters"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - customWebApiParameters = WebApiVectorizerParameters.DeserializeWebApiVectorizerParameters(property.Value); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - if (property.NameEquals("kind"u8)) - { - kind = new VectorSearchVectorizerKind(property.Value.GetString()); - continue; - } - } - return new WebApiVectorizer(name, kind, customWebApiParameters); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new WebApiVectorizer FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeWebApiVectorizer(document.RootElement); - } - - /// Convert into a . - internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizer.cs deleted file mode 100644 index e605e98da9a7..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizer.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies a user-defined vectorizer for generating the vector embedding of a query string. Integration of an external vectorizer is achieved using the custom Web API interface of a skillset. - public partial class WebApiVectorizer : VectorSearchVectorizer - { - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// is null. - public WebApiVectorizer(string vectorizerName) : base(vectorizerName) - { - Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); - - Kind = VectorSearchVectorizerKind.CustomWebApi; - } - - /// Initializes a new instance of . - /// The name to associate with this particular vectorization method. - /// The name of the kind of vectorization method being configured for use with vector search. - /// Specifies the properties of the user-defined vectorizer. 
- internal WebApiVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, WebApiVectorizerParameters parameters) : base(vectorizerName, kind) - { - Parameters = parameters; - Kind = kind; - } - - /// Specifies the properties of the user-defined vectorizer. - public WebApiVectorizerParameters Parameters { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizerParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizerParameters.Serialization.cs deleted file mode 100644 index 64f34d846693..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizerParameters.Serialization.cs +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class WebApiVectorizerParameters : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(Uri)) - { - writer.WritePropertyName("uri"u8); - writer.WriteStringValue(Uri.AbsoluteUri); - } - if (Optional.IsCollectionDefined(HttpHeaders)) - { - writer.WritePropertyName("httpHeaders"u8); - writer.WriteStartObject(); - foreach (var item in HttpHeaders) - { - writer.WritePropertyName(item.Key); - writer.WriteStringValue(item.Value); - } - writer.WriteEndObject(); - } - if (Optional.IsDefined(HttpMethod)) - { - writer.WritePropertyName("httpMethod"u8); - writer.WriteStringValue(HttpMethod); - } - if (Optional.IsDefined(Timeout)) - { - writer.WritePropertyName("timeout"u8); - writer.WriteStringValue(Timeout.Value, "P"); - } - if (Optional.IsDefined(AuthResourceId)) - { - if (AuthResourceId != null) - { - writer.WritePropertyName("authResourceId"u8); - writer.WriteStringValue(AuthResourceId); - } - else - { - writer.WriteNull("authResourceId"); - } - } - if (Optional.IsDefined(AuthIdentity)) - { - if (AuthIdentity != null) - { - writer.WritePropertyName("authIdentity"u8); - writer.WriteObjectValue(AuthIdentity); - } - else - { - writer.WriteNull("authIdentity"); - } - } - writer.WriteEndObject(); - } - - internal static WebApiVectorizerParameters DeserializeWebApiVectorizerParameters(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - Uri uri = default; - IDictionary httpHeaders = default; - string httpMethod = default; - TimeSpan? 
timeout = default; - ResourceIdentifier authResourceId = default; - SearchIndexerDataIdentity authIdentity = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("uri"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - uri = new Uri(property.Value.GetString()); - continue; - } - if (property.NameEquals("httpHeaders"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - Dictionary dictionary = new Dictionary(); - foreach (var property0 in property.Value.EnumerateObject()) - { - dictionary.Add(property0.Name, property0.Value.GetString()); - } - httpHeaders = dictionary; - continue; - } - if (property.NameEquals("httpMethod"u8)) - { - httpMethod = property.Value.GetString(); - continue; - } - if (property.NameEquals("timeout"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - timeout = property.Value.GetTimeSpan("P"); - continue; - } - if (property.NameEquals("authResourceId"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - authResourceId = null; - continue; - } - authResourceId = new ResourceIdentifier(property.Value.GetString()); - continue; - } - if (property.NameEquals("authIdentity"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - authIdentity = null; - continue; - } - authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value); - continue; - } - } - return new WebApiVectorizerParameters( - uri, - httpHeaders ?? new ChangeTrackingDictionary(), - httpMethod, - timeout, - authResourceId, - authIdentity); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static WebApiVectorizerParameters FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeWebApiVectorizerParameters(document.RootElement); - } - - /// Convert into a . - internal virtual RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizerParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizerParameters.cs deleted file mode 100644 index 8290bcf42674..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WebApiVectorizerParameters.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Specifies the properties for connecting to a user-defined vectorizer. - public partial class WebApiVectorizerParameters - { - /// Initializes a new instance of . - public WebApiVectorizerParameters() - { - HttpHeaders = new ChangeTrackingDictionary(); - } - - /// Initializes a new instance of . - /// The URI of the Web API providing the vectorizer. - /// The headers required to make the HTTP request. - /// The method for the HTTP request. - /// The desired timeout for the request. Default is 30 seconds. - /// Applies to custom endpoints that connect to external code in an Azure function or some other application that provides the transformations. 
This value should be the application ID created for the function or app when it was registered with Azure Active Directory. When specified, the vectorization connects to the function or app using a managed ID (either system or user-assigned) of the search service and the access token of the function or app, using this value as the resource id for creating the scope of the access token. - /// - /// The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - internal WebApiVectorizerParameters(Uri uri, IDictionary httpHeaders, string httpMethod, TimeSpan? timeout, ResourceIdentifier authResourceId, SearchIndexerDataIdentity authIdentity) - { - Uri = uri; - HttpHeaders = httpHeaders; - HttpMethod = httpMethod; - Timeout = timeout; - AuthResourceId = authResourceId; - AuthIdentity = authIdentity; - } - - /// The URI of the Web API providing the vectorizer. - public Uri Uri { get; set; } - /// The headers required to make the HTTP request. - public IDictionary HttpHeaders { get; } - /// The method for the HTTP request. - public string HttpMethod { get; set; } - /// The desired timeout for the request. Default is 30 seconds. - public TimeSpan? Timeout { get; set; } - /// Applies to custom endpoints that connect to external code in an Azure function or some other application that provides the transformations. This value should be the application ID created for the function or app when it was registered with Azure Active Directory. When specified, the vectorization connects to the function or app using a managed ID (either system or user-assigned) of the search service and the access token of the function or app, using this value as the resource id for creating the scope of the access token. - public ResourceIdentifier AuthResourceId { get; set; } - /// - /// The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. - /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . - /// - public SearchIndexerDataIdentity AuthIdentity { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WordDelimiterTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WordDelimiterTokenFilter.Serialization.cs deleted file mode 100644 index b1668e1a03bd..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WordDelimiterTokenFilter.Serialization.cs +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
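[Reviewer note] The WebApiVectorizer/WebApiVectorizerParameters pair removed above is the query-time counterpart of WebApiSkill. A hedged sketch of registering one on an index's vector search configuration; the vectorizer name, endpoint, and header value are placeholders, and the algorithm/profile wiring is omitted:

using System;
using Azure.Search.Documents.Indexes.Models;

var vectorizer = new WebApiVectorizer("my-custom-vectorizer")
{
    Parameters = new WebApiVectorizerParameters
    {
        Uri = new Uri("https://my-function.azurewebsites.net/api/embed"), // placeholder endpoint
        HttpMethod = "POST",
        Timeout = TimeSpan.FromSeconds(60),
        HttpHeaders = { { "x-functions-key", "<function-key>" } },        // HttpHeaders is read-only; populate via initializer
    },
};

var vectorSearch = new VectorSearch();
vectorSearch.Vectorizers.Add(vectorizer);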
- -// - -#nullable disable - -using System.Collections.Generic; -using System.Text.Json; -using Azure.Core; - -namespace Azure.Search.Documents.Indexes.Models -{ - public partial class WordDelimiterTokenFilter : IUtf8JsonSerializable - { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) - { - writer.WriteStartObject(); - if (Optional.IsDefined(GenerateWordParts)) - { - writer.WritePropertyName("generateWordParts"u8); - writer.WriteBooleanValue(GenerateWordParts.Value); - } - if (Optional.IsDefined(GenerateNumberParts)) - { - writer.WritePropertyName("generateNumberParts"u8); - writer.WriteBooleanValue(GenerateNumberParts.Value); - } - if (Optional.IsDefined(CatenateWords)) - { - writer.WritePropertyName("catenateWords"u8); - writer.WriteBooleanValue(CatenateWords.Value); - } - if (Optional.IsDefined(CatenateNumbers)) - { - writer.WritePropertyName("catenateNumbers"u8); - writer.WriteBooleanValue(CatenateNumbers.Value); - } - if (Optional.IsDefined(CatenateAll)) - { - writer.WritePropertyName("catenateAll"u8); - writer.WriteBooleanValue(CatenateAll.Value); - } - if (Optional.IsDefined(SplitOnCaseChange)) - { - writer.WritePropertyName("splitOnCaseChange"u8); - writer.WriteBooleanValue(SplitOnCaseChange.Value); - } - if (Optional.IsDefined(PreserveOriginal)) - { - writer.WritePropertyName("preserveOriginal"u8); - writer.WriteBooleanValue(PreserveOriginal.Value); - } - if (Optional.IsDefined(SplitOnNumerics)) - { - writer.WritePropertyName("splitOnNumerics"u8); - writer.WriteBooleanValue(SplitOnNumerics.Value); - } - if (Optional.IsDefined(StemEnglishPossessive)) - { - writer.WritePropertyName("stemEnglishPossessive"u8); - writer.WriteBooleanValue(StemEnglishPossessive.Value); - } - if (Optional.IsCollectionDefined(ProtectedWords)) - { - writer.WritePropertyName("protectedWords"u8); - writer.WriteStartArray(); - foreach (var item in ProtectedWords) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); - } - writer.WritePropertyName("@odata.type"u8); - writer.WriteStringValue(ODataType); - writer.WritePropertyName("name"u8); - writer.WriteStringValue(Name); - writer.WriteEndObject(); - } - - internal static WordDelimiterTokenFilter DeserializeWordDelimiterTokenFilter(JsonElement element) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - bool? generateWordParts = default; - bool? generateNumberParts = default; - bool? catenateWords = default; - bool? catenateNumbers = default; - bool? catenateAll = default; - bool? splitOnCaseChange = default; - bool? preserveOriginal = default; - bool? splitOnNumerics = default; - bool? 
stemEnglishPossessive = default; - IList protectedWords = default; - string odataType = default; - string name = default; - foreach (var property in element.EnumerateObject()) - { - if (property.NameEquals("generateWordParts"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - generateWordParts = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("generateNumberParts"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - generateNumberParts = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("catenateWords"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - catenateWords = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("catenateNumbers"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - catenateNumbers = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("catenateAll"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - catenateAll = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("splitOnCaseChange"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - splitOnCaseChange = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("preserveOriginal"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - preserveOriginal = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("splitOnNumerics"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - splitOnNumerics = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("stemEnglishPossessive"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - stemEnglishPossessive = property.Value.GetBoolean(); - continue; - } - if (property.NameEquals("protectedWords"u8)) - { - if (property.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in property.Value.EnumerateArray()) - { - array.Add(item.GetString()); - } - protectedWords = array; - continue; - } - if (property.NameEquals("@odata.type"u8)) - { - odataType = property.Value.GetString(); - continue; - } - if (property.NameEquals("name"u8)) - { - name = property.Value.GetString(); - continue; - } - } - return new WordDelimiterTokenFilter( - odataType, - name, - generateWordParts, - generateNumberParts, - catenateWords, - catenateNumbers, - catenateAll, - splitOnCaseChange, - preserveOriginal, - splitOnNumerics, - stemEnglishPossessive, - protectedWords ?? new ChangeTrackingList()); - } - - /// Deserializes the model from a raw response. - /// The response to deserialize the model from. - internal static new WordDelimiterTokenFilter FromResponse(Response response) - { - using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); - return DeserializeWordDelimiterTokenFilter(document.RootElement); - } - - /// Convert into a . 
- internal override RequestContent ToRequestContent() - { - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); - return content; - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/WordDelimiterTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/WordDelimiterTokenFilter.cs deleted file mode 100644 index f0fc22e07f02..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/WordDelimiterTokenFilter.cs +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Collections.Generic; - -namespace Azure.Search.Documents.Indexes.Models -{ - /// Splits words into subwords and performs optional transformations on subword groups. This token filter is implemented using Apache Lucene. - public partial class WordDelimiterTokenFilter : TokenFilter - { - /// Initializes a new instance of . - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// is null. - public WordDelimiterTokenFilter(string name) : base(name) - { - Argument.AssertNotNull(name, nameof(name)); - - ProtectedWords = new ChangeTrackingList(); - ODataType = "#Microsoft.Azure.Search.WordDelimiterTokenFilter"; - } - - /// Initializes a new instance of . - /// A URI fragment specifying the type of token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A value indicating whether to generate part words. If set, causes parts of words to be generated; for example "AzureSearch" becomes "Azure" "Search". Default is true. - /// A value indicating whether to generate number subwords. Default is true. - /// A value indicating whether maximum runs of word parts will be catenated. For example, if this is set to true, "Azure-Search" becomes "AzureSearch". Default is false. - /// A value indicating whether maximum runs of number parts will be catenated. For example, if this is set to true, "1-2" becomes "12". Default is false. - /// A value indicating whether all subword parts will be catenated. For example, if this is set to true, "Azure-Search-1" becomes "AzureSearch1". Default is false. - /// A value indicating whether to split words on caseChange. For example, if this is set to true, "AzureSearch" becomes "Azure" "Search". Default is true. - /// A value indicating whether original words will be preserved and added to the subword list. Default is false. - /// A value indicating whether to split on numbers. For example, if this is set to true, "Azure1Search" becomes "Azure" "1" "Search". Default is true. - /// A value indicating whether to remove trailing "'s" for each subword. Default is true. - /// A list of tokens to protect from being delimited. - internal WordDelimiterTokenFilter(string oDataType, string name, bool? generateWordParts, bool? generateNumberParts, bool? catenateWords, bool? catenateNumbers, bool? catenateAll, bool? splitOnCaseChange, bool? preserveOriginal, bool? splitOnNumerics, bool? 
stemEnglishPossessive, IList protectedWords) : base(oDataType, name) - { - GenerateWordParts = generateWordParts; - GenerateNumberParts = generateNumberParts; - CatenateWords = catenateWords; - CatenateNumbers = catenateNumbers; - CatenateAll = catenateAll; - SplitOnCaseChange = splitOnCaseChange; - PreserveOriginal = preserveOriginal; - SplitOnNumerics = splitOnNumerics; - StemEnglishPossessive = stemEnglishPossessive; - ProtectedWords = protectedWords; - ODataType = oDataType ?? "#Microsoft.Azure.Search.WordDelimiterTokenFilter"; - } - - /// A value indicating whether to generate part words. If set, causes parts of words to be generated; for example "AzureSearch" becomes "Azure" "Search". Default is true. - public bool? GenerateWordParts { get; set; } - /// A value indicating whether to generate number subwords. Default is true. - public bool? GenerateNumberParts { get; set; } - /// A value indicating whether maximum runs of word parts will be catenated. For example, if this is set to true, "Azure-Search" becomes "AzureSearch". Default is false. - public bool? CatenateWords { get; set; } - /// A value indicating whether maximum runs of number parts will be catenated. For example, if this is set to true, "1-2" becomes "12". Default is false. - public bool? CatenateNumbers { get; set; } - /// A value indicating whether all subword parts will be catenated. For example, if this is set to true, "Azure-Search-1" becomes "AzureSearch1". Default is false. - public bool? CatenateAll { get; set; } - /// A value indicating whether to split words on caseChange. For example, if this is set to true, "AzureSearch" becomes "Azure" "Search". Default is true. - public bool? SplitOnCaseChange { get; set; } - /// A value indicating whether original words will be preserved and added to the subword list. Default is false. - public bool? PreserveOriginal { get; set; } - /// A value indicating whether to split on numbers. For example, if this is set to true, "Azure1Search" becomes "Azure" "1" "Search". Default is true. - public bool? SplitOnNumerics { get; set; } - /// A value indicating whether to remove trailing "'s" for each subword. Default is true. - public bool? StemEnglishPossessive { get; set; } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenFilter.Serialization.cs new file mode 100644 index 000000000000..914ecd358e1b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenFilter.Serialization.cs @@ -0,0 +1,160 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class NGramTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.NGramTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MinGram)) + { + writer.WritePropertyName("minGram"u8); + writer.WriteNumberValue(MinGram.Value); + } + if (Optional.IsDefined(MaxGram)) + { + writer.WritePropertyName("maxGram"u8); + writer.WriteNumberValue(MaxGram.Value); + } + } + + Search.Documents.Indexes.Models.NGramTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.NGramTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.NGramTokenFilter.DeserializeNGramTokenFilter(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.NGramTokenFilter DeserializeNGramTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? minGram = default; + int? maxGram = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("minGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.NGramTokenFilter(odataType, name, serializedAdditionalRawData, minGram, maxGram); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.NGramTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.NGramTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.NGramTokenFilter.DeserializeNGramTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.NGramTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.NGramTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.NGramTokenFilter.DeserializeNGramTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenFilter.cs new file mode 100644 index 000000000000..89bae6fff32e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenFilter.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Generates n-grams of the given size(s). This token filter is implemented using + /// Apache Lucene. + /// + public partial class NGramTokenFilter : Search.Documents.TokenFilter + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the + /// value of maxGram. + /// + /// The maximum n-gram length. Default is 2. Maximum is 300. + internal NGramTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, int? minGram, int? maxGram) : base(odataType, name, serializedAdditionalRawData) + { + MinGram = minGram; + MaxGram = maxGram; + } + + /// Initializes a new instance of for deserialization. + internal NGramTokenFilter() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenizer.Serialization.cs new file mode 100644 index 000000000000..317e482356f2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenizer.Serialization.cs @@ -0,0 +1,193 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
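Each of the new Serialization.cs files above implements IJsonModel&lt;T&gt; and IPersistableModel&lt;T&gt;, so the models can be round-tripped through System.ClientModel's ModelReaderWriter. A minimal sketch using the NGramTokenFilter surface shown above; the JSON payload and discriminator value are illustrative (the generated deserializer stores @odata.type as-is, without validating it):

    using System;
    using System.ClientModel.Primitives;
    using Azure.Search.Documents.Indexes.Models;

    // Sketch only: read a token filter from wire-format JSON, then write it back out.
    BinaryData json = BinaryData.FromString(
        "{\"@odata.type\":\"#Microsoft.Azure.Search.NGramTokenFilterV2\",\"name\":\"myNgram\",\"minGram\":1,\"maxGram\":3}");

    NGramTokenFilter filter = ModelReaderWriter.Read<NGramTokenFilter>(json);
    BinaryData roundTripped = ModelReaderWriter.Write(filter);
    Console.WriteLine(roundTripped.ToString());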
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class NGramTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NGramTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MinGram)) + { + writer.WritePropertyName("minGram"u8); + writer.WriteNumberValue(MinGram.Value); + } + if (Optional.IsDefined(MaxGram)) + { + writer.WritePropertyName("maxGram"u8); + writer.WriteNumberValue(MaxGram.Value); + } + if (Optional.IsCollectionDefined(TokenChars)) + { + writer.WritePropertyName("tokenChars"u8); + writer.WriteStartArray(); + foreach (var item in TokenChars) + { + writer.WriteStringValue(item.ToString()); + } + writer.WriteEndArray(); + } + } + + NGramTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NGramTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeNGramTokenizer(document.RootElement, options); + } + + internal static NGramTokenizer DeserializeNGramTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? minGram = default; + int? 
maxGram = default; + IList tokenChars = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("minGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxGram"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxGram = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("tokenChars"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(new TokenCharacterKind(item.GetString())); + } + tokenChars = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new NGramTokenizer( + odataType, + name, + serializedAdditionalRawData, + minGram, + maxGram, + tokenChars ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(NGramTokenizer)} does not support writing '{options.Format}' format."); + } + } + + NGramTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeNGramTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(NGramTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new NGramTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeNGramTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenizer.cs new file mode 100644 index 000000000000..255fee42f42b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/NGramTokenizer.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
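A minimal usage sketch for the n-gram tokenizer defined in the NGramTokenizer.cs file that follows, assuming it keeps shipping through the existing Azure.Search.Documents.Indexes.Models public surface; the tokenizer name is illustrative:

    using Azure.Search.Documents.Indexes.Models;

    // Sketch only: emit 2- and 3-character grams built from letters and digits.
    var tokenizer = new NGramTokenizer("my-ngram-tokenizer")
    {
        MinGram = 2, // must be less than MaxGram; maximum is 300
        MaxGram = 3,
    };
    tokenizer.TokenChars.Add(TokenCharacterKind.Letter);
    tokenizer.TokenChars.Add(TokenCharacterKind.Digit);

The tokenizer would then typically be added to a SearchIndex's Tokenizers collection before the index is created or updated.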
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Tokenizes the input into n-grams of the given size(s). This tokenizer is + /// implemented using Apache Lucene. + /// + public partial class NGramTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public NGramTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.NGramTokenizer"; + TokenChars = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the + /// value of maxGram. + /// + /// The maximum n-gram length. Default is 2. Maximum is 300. + /// Character classes to keep in the tokens. + internal NGramTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? minGram, int? maxGram, IList tokenChars) : base(odataType, name, serializedAdditionalRawData) + { + MinGram = minGram; + MaxGram = maxGram; + TokenChars = tokenChars; + } + + /// Initializes a new instance of for deserialization. + internal NGramTokenizer() + { + } + + /// + /// The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the + /// value of maxGram. + /// + public int? MinGram { get; set; } + /// The maximum n-gram length. Default is 2. Maximum is 300. + public int? MaxGram { get; set; } + /// Character classes to keep in the tokens. + public IList TokenChars { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/NativeBlobSoftDeleteDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/NativeBlobSoftDeleteDeletionDetectionPolicy.Serialization.cs new file mode 100644 index 000000000000..1914f2f18527 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/NativeBlobSoftDeleteDeletionDetectionPolicy.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class NativeBlobSoftDeleteDeletionDetectionPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NativeBlobSoftDeleteDeletionDetectionPolicy)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + NativeBlobSoftDeleteDeletionDetectionPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NativeBlobSoftDeleteDeletionDetectionPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(document.RootElement, options); + } + + internal static NativeBlobSoftDeleteDeletionDetectionPolicy DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new NativeBlobSoftDeleteDeletionDetectionPolicy(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(NativeBlobSoftDeleteDeletionDetectionPolicy)} does not support writing '{options.Format}' format."); + } + } + + NativeBlobSoftDeleteDeletionDetectionPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(NativeBlobSoftDeleteDeletionDetectionPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new NativeBlobSoftDeleteDeletionDetectionPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeNativeBlobSoftDeleteDeletionDetectionPolicy(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/NativeBlobSoftDeleteDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/NativeBlobSoftDeleteDeletionDetectionPolicy.cs new file mode 100644 index 000000000000..63757ecf76b8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/NativeBlobSoftDeleteDeletionDetectionPolicy.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a data deletion detection policy utilizing Azure Blob Storage's native + /// soft delete feature for deletion detection. + /// + public partial class NativeBlobSoftDeleteDeletionDetectionPolicy : DataDeletionDetectionPolicy + { + /// Initializes a new instance of . + public NativeBlobSoftDeleteDeletionDetectionPolicy() + { + OdataType = "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal NativeBlobSoftDeleteDeletionDetectionPolicy(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrLineEnding.cs b/sdk/search/Azure.Search.Documents/src/Generated/OcrLineEnding.cs similarity index 93% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/OcrLineEnding.cs rename to sdk/search/Azure.Search.Documents/src/Generated/OcrLineEnding.cs index a33b86e2c61c..a5a01cc05b82 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrLineEnding.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/OcrLineEnding.cs @@ -8,9 +8,12 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { - /// Defines the sequence of characters to use between the lines of text recognized by the OCR skill. The default value is "space". + /// + /// Defines the sequence of characters to use between the lines of text recognized + /// by the OCR skill. The default value is "space". + /// public readonly partial struct OcrLineEnding : IEquatable { private readonly string _value; diff --git a/sdk/search/Azure.Search.Documents/src/Generated/OcrSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/OcrSkill.Serialization.cs new file mode 100644 index 000000000000..f6f06efb5826 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/OcrSkill.Serialization.cs @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
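A minimal sketch of putting the NativeBlobSoftDeleteDeletionDetectionPolicy added above to use; SearchIndexerDataSourceConnection and its DataDeletionDetectionPolicy property are assumed from the existing public data source model and are not part of this diff:

    using Azure.Search.Documents.Indexes.Models;

    internal static class SoftDeleteSetup
    {
        // Sketch only: opt a blob data source into native soft-delete deletion detection.
        public static void EnableNativeSoftDelete(SearchIndexerDataSourceConnection dataSource)
        {
            // The parameterless constructor sets the @odata.type discriminator to
            // "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy".
            dataSource.DataDeletionDetectionPolicy = new NativeBlobSoftDeleteDeletionDetectionPolicy();
        }
    }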
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class OcrSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(OcrSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); + } + if (Optional.IsDefined(ShouldDetectOrientation)) + { + writer.WritePropertyName("detectOrientation"u8); + writer.WriteBooleanValue(ShouldDetectOrientation.Value); + } + if (Optional.IsDefined(LineEnding)) + { + writer.WritePropertyName("lineEnding"u8); + writer.WriteStringValue(LineEnding.Value.ToString()); + } + } + + OcrSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(OcrSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeOcrSkill(document.RootElement, options); + } + + internal static OcrSkill DeserializeOcrSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + OcrSkillLanguage? defaultLanguageCode = default; + bool? detectOrientation = default; + OcrLineEnding? 
lineEnding = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultLanguageCode = new OcrSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("detectOrientation"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + detectOrientation = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("lineEnding"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lineEnding = new OcrLineEnding(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new OcrSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + detectOrientation, + lineEnding); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(OcrSkill)} does not support writing '{options.Format}' format."); + } + } + + OcrSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeOcrSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(OcrSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static new OcrSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeOcrSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/OcrSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/OcrSkill.cs new file mode 100644 index 000000000000..ea63d7648205 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/OcrSkill.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A skill that extracts text from image files. + public partial class OcrSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public OcrSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Vision.OcrSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. Default is `en`. + /// A value indicating to turn orientation detection on or not. Default is false. + /// + /// Defines the sequence of characters to use between the lines of text recognized + /// by the OCR skill. The default value is "space". + /// + internal OcrSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, OcrSkillLanguage? defaultLanguageCode, bool? shouldDetectOrientation, OcrLineEnding? lineEnding) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + ShouldDetectOrientation = shouldDetectOrientation; + LineEnding = lineEnding; + } + + /// Initializes a new instance of for deserialization. + internal OcrSkill() + { + } + + /// A value indicating which language code to use. 
Default is `en`. + public OcrSkillLanguage? DefaultLanguageCode { get; set; } + /// A value indicating to turn orientation detection on or not. Default is false. + public bool? ShouldDetectOrientation { get; set; } + /// + /// Defines the sequence of characters to use between the lines of text recognized + /// by the OCR skill. The default value is "space". + /// + public OcrLineEnding? LineEnding { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/OcrSkillLanguage.cs similarity index 99% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/OcrSkillLanguage.cs index a3a8d170807f..d621e878a371 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/OcrSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/OcrSkillLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The language codes supported for input by OcrSkill. public readonly partial struct OcrSkillLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/OutputFieldMappingEntry.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/OutputFieldMappingEntry.Serialization.cs new file mode 100644 index 000000000000..f59a1a050148 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/OutputFieldMappingEntry.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class OutputFieldMappingEntry : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(OutputFieldMappingEntry)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(TargetName)) + { + writer.WritePropertyName("targetName"u8); + writer.WriteStringValue(TargetName); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + OutputFieldMappingEntry IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(OutputFieldMappingEntry)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeOutputFieldMappingEntry(document.RootElement, options); + } + + internal static OutputFieldMappingEntry DeserializeOutputFieldMappingEntry(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string targetName = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("targetName"u8)) + { + targetName = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new OutputFieldMappingEntry(name, targetName, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(OutputFieldMappingEntry)} does not support writing '{options.Format}' format."); + } + } + + OutputFieldMappingEntry IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeOutputFieldMappingEntry(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(OutputFieldMappingEntry)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static OutputFieldMappingEntry FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeOutputFieldMappingEntry(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/OutputFieldMappingEntry.cs b/sdk/search/Azure.Search.Documents/src/Generated/OutputFieldMappingEntry.cs new file mode 100644 index 000000000000..96ddb4799ffa --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/OutputFieldMappingEntry.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
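A minimal usage sketch combining the OcrSkill, InputFieldMappingEntry, and OutputFieldMappingEntry types shown above; the field names and context path are illustrative, and InputFieldMappingEntry's constructor and Source property are assumed from the existing public model rather than from this diff:

    using Azure.Search.Documents.Indexes.Models;

    // Sketch only: run OCR over the indexer's normalized images and expose the
    // recognized text under the output name "extractedText".
    var ocrSkill = new OcrSkill(
        inputs: new[] { new InputFieldMappingEntry("image") { Source = "/document/normalized_images/*" } },
        outputs: new[] { new OutputFieldMappingEntry("text") { TargetName = "extractedText" } })
    {
        Context = "/document/normalized_images/*",
        DefaultLanguageCode = OcrSkillLanguage.En,
        ShouldDetectOrientation = true,
        LineEnding = OcrLineEnding.Space, // the documented default
    };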
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Output field mapping for a skill. + public partial class OutputFieldMappingEntry + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the output defined by the skill. + /// is null. + public OutputFieldMappingEntry(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// The name of the output defined by the skill. + /// The target name of the output. It is optional and default to name. + /// Keeps track of any properties unknown to the library. + internal OutputFieldMappingEntry(string name, string targetName, IDictionary serializedAdditionalRawData) + { + Name = name; + TargetName = targetName; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal OutputFieldMappingEntry() + { + } + + /// The name of the output defined by the skill. + public string Name { get; set; } + /// The target name of the output. It is optional and default to name. + public string TargetName { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PathHierarchyTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PathHierarchyTokenizer.Serialization.cs new file mode 100644 index 000000000000..5e1d18f962aa --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PathHierarchyTokenizer.Serialization.cs @@ -0,0 +1,207 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class PathHierarchyTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PathHierarchyTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Delimiter)) + { + writer.WritePropertyName("delimiter"u8); + writer.WriteStringValue(Delimiter); + } + if (Optional.IsDefined(Replacement)) + { + writer.WritePropertyName("replacement"u8); + writer.WriteStringValue(Replacement); + } + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + if (Optional.IsDefined(ReverseTokenOrder)) + { + writer.WritePropertyName("reverse"u8); + writer.WriteBooleanValue(ReverseTokenOrder.Value); + } + if (Optional.IsDefined(NumberOfTokensToSkip)) + { + writer.WritePropertyName("skip"u8); + writer.WriteNumberValue(NumberOfTokensToSkip.Value); + } + } + + Search.Documents.Indexes.Models.PathHierarchyTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PathHierarchyTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.PathHierarchyTokenizer.DeserializePathHierarchyTokenizer(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.PathHierarchyTokenizer DeserializePathHierarchyTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string delimiter = default; + string replacement = default; + int? maxTokenLength = default; + bool? reverse = default; + int? 
skip = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("delimiter"u8)) + { + delimiter = property.Value.GetString(); + continue; + } + if (property.NameEquals("replacement"u8)) + { + replacement = property.Value.GetString(); + continue; + } + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("reverse"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + reverse = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("skip"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + skip = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.PathHierarchyTokenizer( + odataType, + name, + serializedAdditionalRawData, + delimiter, + replacement, + maxTokenLength, + reverse, + skip); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PathHierarchyTokenizer)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.PathHierarchyTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.PathHierarchyTokenizer.DeserializePathHierarchyTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PathHierarchyTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.PathHierarchyTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.PathHierarchyTokenizer.DeserializePathHierarchyTokenizer(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PathHierarchyTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/PathHierarchyTokenizer.cs new file mode 100644 index 000000000000..33118500abc2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PathHierarchyTokenizer.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Tokenizer for path-like hierarchies. This tokenizer is implemented using Apache + /// Lucene. + /// + public partial class PathHierarchyTokenizer : Search.Documents.LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public PathHierarchyTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.PathHierarchyTokenizerV2"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The delimiter character to use. Default is "/". + /// A value that, if set, replaces the delimiter character. Default is "/". + /// The maximum token length. Default and maximum is 300. + /// + /// A value indicating whether to generate tokens in reverse order. Default is + /// false. + /// + /// The number of initial tokens to skip. Default is 0. + internal PathHierarchyTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, string delimiter, string replacement, int? maxTokenLength, bool? reverseTokenOrder, int? numberOfTokensToSkip) : base(odataType, name, serializedAdditionalRawData) + { + Delimiter = delimiter; + Replacement = replacement; + MaxTokenLength = maxTokenLength; + ReverseTokenOrder = reverseTokenOrder; + NumberOfTokensToSkip = numberOfTokensToSkip; + } + + /// Initializes a new instance of for deserialization. + internal PathHierarchyTokenizer() + { + } + + /// The delimiter character to use. Default is "/". + public string Delimiter { get; set; } + /// A value that, if set, replaces the delimiter character. Default is "/". + public string Replacement { get; set; } + /// The maximum token length. Default and maximum is 300. + public int? MaxTokenLength { get; set; } + /// + /// A value indicating whether to generate tokens in reverse order. Default is + /// false. + /// + public bool? ReverseTokenOrder { get; set; } + /// The number of initial tokens to skip. Default is 0. + public int? 
NumberOfTokensToSkip { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternAnalyzer.Serialization.cs new file mode 100644 index 000000000000..46d34b2b05b5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternAnalyzer.Serialization.cs @@ -0,0 +1,206 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + public partial class PatternAnalyzer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternAnalyzer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(LowerCaseTerms)) + { + writer.WritePropertyName("lowercase"u8); + writer.WriteBooleanValue(LowerCaseTerms.Value); + } + if (Optional.IsDefined(Pattern)) + { + writer.WritePropertyName("pattern"u8); + writer.WriteStringValue(Pattern); + } + if (Optional.IsDefined(Flags)) + { + writer.WritePropertyName("flags"u8); + writer.WriteStringValue(Flags.Value.ToString()); + } + if (Optional.IsCollectionDefined(Stopwords)) + { + writer.WritePropertyName("stopwords"u8); + writer.WriteStartArray(); + foreach (var item in Stopwords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + } + + PatternAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternAnalyzer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePatternAnalyzer(document.RootElement, options); + } + + internal static PatternAnalyzer DeserializePatternAnalyzer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool? lowercase = default; + string pattern = default; + Search.Documents.Indexes.Models.RegexFlag? 
flags = default; + IList stopwords = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("lowercase"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lowercase = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("pattern"u8)) + { + pattern = property.Value.GetString(); + continue; + } + if (property.NameEquals("flags"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + flags = new Search.Documents.Indexes.Models.RegexFlag(property.Value.GetString()); + continue; + } + if (property.NameEquals("stopwords"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + stopwords = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PatternAnalyzer( + odataType, + name, + serializedAdditionalRawData, + lowercase, + pattern, + flags, + stopwords ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PatternAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + PatternAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PatternAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new PatternAnalyzer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternAnalyzer(document.RootElement); + } + + /// Convert into a . 
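// A short sketch of constructing the PatternAnalyzer deserialized above, assuming placeholder
// analyzer and stopword values; Stopwords is a read-only list, so entries are added in place.
// using Azure.Search.Documents;
var analyzer = new PatternAnalyzer("whitespace-pattern-analyzer")
{
    Pattern = @"\s+",       // default pattern matches one or more non-word characters
    LowerCaseTerms = true   // default is true
};
analyzer.Stopwords.Add("the");
analyzer.Stopwords.Add("and");
// Flags takes the extensible Search.Documents.Indexes.Models.RegexFlag value and is written
// to the "flags" property as its string form, as in JsonModelWriteCore above.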
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternAnalyzer.cs new file mode 100644 index 000000000000..402ce04a2b9d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternAnalyzer.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// Flexibly separates text into terms via a regular expression pattern. This + /// analyzer is implemented using Apache Lucene. + /// + public partial class PatternAnalyzer : LexicalAnalyzer + { + /// Initializes a new instance of . + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public PatternAnalyzer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.PatternAnalyzer"; + Stopwords = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating whether terms should be lower-cased. Default is true. + /// + /// A regular expression pattern to match token separators. Default is an + /// expression that matches one or more non-word characters. + /// + /// Regular expression flags. + /// A list of stopwords. + internal PatternAnalyzer(string odataType, string name, IDictionary serializedAdditionalRawData, bool? lowerCaseTerms, string pattern, Search.Documents.Indexes.Models.RegexFlag? flags, IList stopwords) : base(odataType, name, serializedAdditionalRawData) + { + LowerCaseTerms = lowerCaseTerms; + Pattern = pattern; + Flags = flags; + Stopwords = stopwords; + } + + /// Initializes a new instance of for deserialization. + internal PatternAnalyzer() + { + } + + /// A value indicating whether terms should be lower-cased. Default is true. + public bool? LowerCaseTerms { get; set; } + /// + /// A regular expression pattern to match token separators. Default is an + /// expression that matches one or more non-word characters. + /// + public string Pattern { get; set; } + /// Regular expression flags. + public Search.Documents.Indexes.Models.RegexFlag? Flags { get; set; } + /// A list of stopwords. + public IList Stopwords { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternCaptureTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternCaptureTokenFilter.Serialization.cs new file mode 100644 index 000000000000..9558bbdac851 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternCaptureTokenFilter.Serialization.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
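// A hedged sketch of the round-trip these IJsonModel/IPersistableModel implementations support,
// using PatternAnalyzer purely as an example model; with the default ModelReaderWriter options
// the "J" (JSON) branch of the format switches shown throughout this diff is taken.
// using System; using System.ClientModel.Primitives;
var model = new PatternAnalyzer("example-analyzer") { Pattern = @"\W+" };
BinaryData json = ModelReaderWriter.Write(model);                              // "J" write path
PatternAnalyzer roundTripped = ModelReaderWriter.Read<PatternAnalyzer>(json);  // "J" read path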
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class PatternCaptureTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternCaptureTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("patterns"u8); + writer.WriteStartArray(); + foreach (var item in Patterns) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(PreserveOriginal)) + { + writer.WritePropertyName("preserveOriginal"u8); + writer.WriteBooleanValue(PreserveOriginal.Value); + } + } + + PatternCaptureTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternCaptureTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePatternCaptureTokenFilter(document.RootElement, options); + } + + internal static PatternCaptureTokenFilter DeserializePatternCaptureTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList patterns = default; + bool? preserveOriginal = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("patterns"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + patterns = array; + continue; + } + if (property.NameEquals("preserveOriginal"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preserveOriginal = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PatternCaptureTokenFilter(odataType, name, serializedAdditionalRawData, patterns, preserveOriginal); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PatternCaptureTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + PatternCaptureTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternCaptureTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PatternCaptureTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new PatternCaptureTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternCaptureTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternCaptureTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternCaptureTokenFilter.cs new file mode 100644 index 000000000000..56d673945c79 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternCaptureTokenFilter.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Uses Java regexes to emit multiple tokens - one for each capture group in one + /// or more patterns. This token filter is implemented using Apache Lucene. + /// + public partial class PatternCaptureTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// A list of patterns to match against each token. + /// or is null. + public PatternCaptureTokenFilter(string name, IEnumerable patterns) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(patterns, nameof(patterns)); + + OdataType = "#Microsoft.Azure.Search.PatternCaptureTokenFilter"; + Patterns = patterns.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// A list of patterns to match against each token. 
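// An illustrative construction of this filter, assuming a placeholder name and capture pattern;
// one token is emitted per matching capture group in each pattern, as described above.
// using Azure.Search.Documents;
var captureFilter = new PatternCaptureTokenFilter("email-domain-capture", new[] { @"@(.+)$" })
{
    PreserveOriginal = true   // default is true, so the unmodified token is kept as well
};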
+ /// + /// A value indicating whether to return the original token even if one of the + /// patterns matches. Default is true. + /// + internal PatternCaptureTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList patterns, bool? preserveOriginal) : base(odataType, name, serializedAdditionalRawData) + { + Patterns = patterns; + PreserveOriginal = preserveOriginal; + } + + /// Initializes a new instance of for deserialization. + internal PatternCaptureTokenFilter() + { + } + + /// A list of patterns to match against each token. + public IList Patterns { get; } + /// + /// A value indicating whether to return the original token even if one of the + /// patterns matches. Default is true. + /// + public bool? PreserveOriginal { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceCharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceCharFilter.Serialization.cs new file mode 100644 index 000000000000..8209af31c38c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceCharFilter.Serialization.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class PatternReplaceCharFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternReplaceCharFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("pattern"u8); + writer.WriteStringValue(Pattern); + writer.WritePropertyName("replacement"u8); + writer.WriteStringValue(Replacement); + } + + PatternReplaceCharFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternReplaceCharFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePatternReplaceCharFilter(document.RootElement, options); + } + + internal static PatternReplaceCharFilter DeserializePatternReplaceCharFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string pattern = default; + string replacement = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("pattern"u8)) + { + pattern = property.Value.GetString(); + continue; + } + if (property.NameEquals("replacement"u8)) + { + replacement = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PatternReplaceCharFilter(odataType, name, serializedAdditionalRawData, pattern, replacement); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PatternReplaceCharFilter)} does not support writing '{options.Format}' format."); + } + } + + PatternReplaceCharFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternReplaceCharFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PatternReplaceCharFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new PatternReplaceCharFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternReplaceCharFilter(document.RootElement); + } + + /// Convert into a . 
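// A usage sketch that mirrors the example in the PatternReplaceCharFilter docs below; only the
// filter name is invented.
// using Azure.Search.Documents;
var charFilter = new PatternReplaceCharFilter("aa-bb-joiner", @"(aa)\s+(bb)", "$1#$2");
// Per the class documentation, the input text "aa bb aa bb" becomes "aa#bb aa#bb".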
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceCharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceCharFilter.cs new file mode 100644 index 000000000000..39b38422518a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceCharFilter.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A character filter that replaces characters in the input string. It uses a + /// regular expression to identify character sequences to preserve and a + /// replacement pattern to identify characters to replace. For example, given the + /// input text "aa bb aa bb", pattern "(aa)\s+(bb)", and replacement "$1#$2", the + /// result would be "aa#bb aa#bb". This character filter is implemented using + /// Apache Lucene. + /// + public partial class PatternReplaceCharFilter : CharFilter + { + /// Initializes a new instance of . + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// A regular expression pattern. + /// The replacement text. + /// , or is null. + public PatternReplaceCharFilter(string name, string pattern, string replacement) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(pattern, nameof(pattern)); + Argument.AssertNotNull(replacement, nameof(replacement)); + + OdataType = "#Microsoft.Azure.Search.PatternReplaceCharFilter"; + Pattern = pattern; + Replacement = replacement; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// A regular expression pattern. + /// The replacement text. + internal PatternReplaceCharFilter(string odataType, string name, IDictionary serializedAdditionalRawData, string pattern, string replacement) : base(odataType, name, serializedAdditionalRawData) + { + Pattern = pattern; + Replacement = replacement; + } + + /// Initializes a new instance of for deserialization. + internal PatternReplaceCharFilter() + { + } + + /// A regular expression pattern. + public string Pattern { get; set; } + /// The replacement text. + public string Replacement { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceTokenFilter.Serialization.cs new file mode 100644 index 000000000000..c38cd022a366 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceTokenFilter.Serialization.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
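// A brief sketch for the PatternReplaceTokenFilter serialized in this file, assuming a
// placeholder name and pattern; in contrast to the char filter above, the replacement is
// applied to individual tokens rather than to the raw input text.
// using Azure.Search.Documents;
var replaceFilter = new PatternReplaceTokenFilter("digits-to-hash", @"\d+", "#");
// Both "pattern" and "replacement" are required and are always written by JsonModelWriteCore below.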
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class PatternReplaceTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternReplaceTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("pattern"u8); + writer.WriteStringValue(Pattern); + writer.WritePropertyName("replacement"u8); + writer.WriteStringValue(Replacement); + } + + PatternReplaceTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternReplaceTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePatternReplaceTokenFilter(document.RootElement, options); + } + + internal static PatternReplaceTokenFilter DeserializePatternReplaceTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string pattern = default; + string replacement = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("pattern"u8)) + { + pattern = property.Value.GetString(); + continue; + } + if (property.NameEquals("replacement"u8)) + { + replacement = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PatternReplaceTokenFilter(odataType, name, serializedAdditionalRawData, pattern, replacement); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PatternReplaceTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + PatternReplaceTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternReplaceTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PatternReplaceTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new PatternReplaceTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternReplaceTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceTokenFilter.cs new file mode 100644 index 000000000000..d2a9ca94033e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternReplaceTokenFilter.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A character filter that replaces characters in the input string. It uses a + /// regular expression to identify character sequences to preserve and a + /// replacement pattern to identify characters to replace. For example, given the + /// input text "aa bb aa bb", pattern "(aa)\s+(bb)", and replacement "$1#$2", the + /// result would be "aa#bb aa#bb". This token filter is implemented using Apache + /// Lucene. + /// + public partial class PatternReplaceTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// A regular expression pattern. + /// The replacement text. + /// , or is null. + public PatternReplaceTokenFilter(string name, string pattern, string replacement) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(pattern, nameof(pattern)); + Argument.AssertNotNull(replacement, nameof(replacement)); + + OdataType = "#Microsoft.Azure.Search.PatternReplaceTokenFilter"; + Pattern = pattern; + Replacement = replacement; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. 
It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// A regular expression pattern. + /// The replacement text. + internal PatternReplaceTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, string pattern, string replacement) : base(odataType, name, serializedAdditionalRawData) + { + Pattern = pattern; + Replacement = replacement; + } + + /// Initializes a new instance of for deserialization. + internal PatternReplaceTokenFilter() + { + } + + /// A regular expression pattern. + public string Pattern { get; set; } + /// The replacement text. + public string Replacement { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternTokenizer.Serialization.cs new file mode 100644 index 000000000000..4a93f64bb704 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternTokenizer.Serialization.cs @@ -0,0 +1,180 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + public partial class PatternTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Pattern)) + { + writer.WritePropertyName("pattern"u8); + writer.WriteStringValue(Pattern); + } + if (Optional.IsDefined(Flags)) + { + writer.WritePropertyName("flags"u8); + writer.WriteStringValue(Flags.Value.ToString()); + } + if (Optional.IsDefined(Group)) + { + writer.WritePropertyName("group"u8); + writer.WriteNumberValue(Group.Value); + } + } + + PatternTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PatternTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePatternTokenizer(document.RootElement, options); + } + + internal static PatternTokenizer DeserializePatternTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string pattern = default; + Search.Documents.Indexes.Models.RegexFlag? flags = default; + int? group = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("pattern"u8)) + { + pattern = property.Value.GetString(); + continue; + } + if (property.NameEquals("flags"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + flags = new Search.Documents.Indexes.Models.RegexFlag(property.Value.GetString()); + continue; + } + if (property.NameEquals("group"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + group = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PatternTokenizer( + odataType, + name, + serializedAdditionalRawData, + pattern, + flags, + group); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PatternTokenizer)} does not support writing '{options.Format}' format."); + } + } + + PatternTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PatternTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new PatternTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePatternTokenizer(document.RootElement); + } + + /// Convert into a . 
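// An illustrative sketch of the PatternTokenizer defined in the companion PatternTokenizer.cs
// below, assuming a placeholder tokenizer name.
// using Azure.Search.Documents;
var commaTokenizer = new PatternTokenizer("comma-separator")
{
    Pattern = ",",   // default pattern matches one or more non-word characters
    Group = -1       // -1 (the default) splits on the whole pattern instead of extracting a group
};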
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PatternTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/PatternTokenizer.cs new file mode 100644 index 000000000000..a2a38012392c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PatternTokenizer.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// Tokenizer that uses regex pattern matching to construct distinct tokens. This + /// tokenizer is implemented using Apache Lucene. + /// + public partial class PatternTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public PatternTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.PatternTokenizer"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// A regular expression pattern to match token separators. Default is an + /// expression that matches one or more non-word characters. + /// + /// Regular expression flags. + /// + /// The zero-based ordinal of the matching group in the regular expression pattern + /// to extract into tokens. Use -1 if you want to use the entire pattern to split + /// the input into tokens, irrespective of matching groups. Default is -1. + /// + internal PatternTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, string pattern, Search.Documents.Indexes.Models.RegexFlag? flags, int? group) : base(odataType, name, serializedAdditionalRawData) + { + Pattern = pattern; + Flags = flags; + Group = group; + } + + /// Initializes a new instance of for deserialization. + internal PatternTokenizer() + { + } + + /// + /// A regular expression pattern to match token separators. Default is an + /// expression that matches one or more non-word characters. + /// + public string Pattern { get; set; } + /// Regular expression flags. + public Search.Documents.Indexes.Models.RegexFlag? Flags { get; set; } + /// + /// The zero-based ordinal of the matching group in the regular expression pattern + /// to extract into tokens. Use -1 if you want to use the entire pattern to split + /// the input into tokens, irrespective of matching groups. Default is -1. + /// + public int? 
Group { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PhoneticEncoder.cs b/sdk/search/Azure.Search.Documents/src/Generated/PhoneticEncoder.cs new file mode 100644 index 000000000000..06ec62b07bd1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PhoneticEncoder.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Identifies the type of phonetic encoder to use with a PhoneticTokenFilter. + public readonly partial struct PhoneticEncoder : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public PhoneticEncoder(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string MetaphoneValue = "metaphone"; + private const string DoubleMetaphoneValue = "doubleMetaphone"; + private const string SoundexValue = "soundex"; + private const string RefinedSoundexValue = "refinedSoundex"; + private const string Caverphone1Value = "caverphone1"; + private const string Caverphone2Value = "caverphone2"; + private const string CologneValue = "cologne"; + private const string NysiisValue = "nysiis"; + private const string KoelnerPhonetikValue = "koelnerPhonetik"; + private const string HaasePhonetikValue = "haasePhonetik"; + private const string BeiderMorseValue = "beiderMorse"; + + /// Encodes a token into a Metaphone value. + public static PhoneticEncoder Metaphone { get; } = new PhoneticEncoder(MetaphoneValue); + /// Encodes a token into a double metaphone value. + public static PhoneticEncoder DoubleMetaphone { get; } = new PhoneticEncoder(DoubleMetaphoneValue); + /// Encodes a token into a Soundex value. + public static PhoneticEncoder Soundex { get; } = new PhoneticEncoder(SoundexValue); + /// Encodes a token into a Refined Soundex value. + public static PhoneticEncoder RefinedSoundex { get; } = new PhoneticEncoder(RefinedSoundexValue); + /// Encodes a token into a Caverphone 1.0 value. + public static PhoneticEncoder Caverphone1 { get; } = new PhoneticEncoder(Caverphone1Value); + /// Encodes a token into a Caverphone 2.0 value. + public static PhoneticEncoder Caverphone2 { get; } = new PhoneticEncoder(Caverphone2Value); + /// Encodes a token into a Cologne Phonetic value. + public static PhoneticEncoder Cologne { get; } = new PhoneticEncoder(CologneValue); + /// Encodes a token into a NYSIIS value. + public static PhoneticEncoder Nysiis { get; } = new PhoneticEncoder(NysiisValue); + /// Encodes a token using the Kölner Phonetik algorithm. + public static PhoneticEncoder KoelnerPhonetik { get; } = new PhoneticEncoder(KoelnerPhonetikValue); + /// Encodes a token using the Haase refinement of the Kölner Phonetik algorithm. + public static PhoneticEncoder HaasePhonetik { get; } = new PhoneticEncoder(HaasePhonetikValue); + /// Encodes a token into a Beider-Morse value. + public static PhoneticEncoder BeiderMorse { get; } = new PhoneticEncoder(BeiderMorseValue); + /// Determines if two values are the same. + public static bool operator ==(PhoneticEncoder left, PhoneticEncoder right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(PhoneticEncoder left, PhoneticEncoder right) => !left.Equals(right); + /// Converts a to a . 
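// A short sketch of the extensible-enum pattern used by PhoneticEncoder: unknown strings still
// round-trip through the struct, and comparisons are case-insensitive per the Equals overload below.
PhoneticEncoder encoder = PhoneticEncoder.DoubleMetaphone;
PhoneticEncoder fromString = "metaphone";                    // implicit string conversion declared just below
bool isMetaphone = fromString == PhoneticEncoder.Metaphone;  // true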
+ public static implicit operator PhoneticEncoder(string value) => new PhoneticEncoder(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is PhoneticEncoder other && Equals(other); + /// + public bool Equals(PhoneticEncoder other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PhoneticTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PhoneticTokenFilter.Serialization.cs new file mode 100644 index 000000000000..e05b16f26d81 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PhoneticTokenFilter.Serialization.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class PhoneticTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PhoneticTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Encoder)) + { + writer.WritePropertyName("encoder"u8); + writer.WriteStringValue(Encoder.Value.ToString()); + } + if (Optional.IsDefined(ReplaceOriginalTokens)) + { + writer.WritePropertyName("replace"u8); + writer.WriteBooleanValue(ReplaceOriginalTokens.Value); + } + } + + PhoneticTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PhoneticTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePhoneticTokenFilter(document.RootElement, options); + } + + internal static PhoneticTokenFilter DeserializePhoneticTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + PhoneticEncoder? encoder = default; + bool? 
replace = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("encoder"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encoder = new PhoneticEncoder(property.Value.GetString()); + continue; + } + if (property.NameEquals("replace"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + replace = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PhoneticTokenFilter(odataType, name, serializedAdditionalRawData, encoder, replace); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PhoneticTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + PhoneticTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePhoneticTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PhoneticTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new PhoneticTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePhoneticTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PhoneticTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/PhoneticTokenFilter.cs new file mode 100644 index 000000000000..230f16105f3e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PhoneticTokenFilter.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Create tokens for phonetic matches. This token filter is implemented using + /// Apache Lucene. + /// + public partial class PhoneticTokenFilter : TokenFilter + { + /// Initializes a new instance of . 
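// An illustrative sketch combining this filter with the PhoneticEncoder values above, assuming a
// placeholder filter name.
// using Azure.Search.Documents;
var phoneticFilter = new PhoneticTokenFilter("names-phonetic")
{
    Encoder = PhoneticEncoder.DoubleMetaphone,  // default is "metaphone"
    ReplaceOriginalTokens = false               // keep originals; encoded tokens are added as synonyms
};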
+ /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public PhoneticTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.PhoneticTokenFilter"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The phonetic encoder to use. Default is "metaphone". + /// + /// A value indicating whether encoded tokens should replace original tokens. If + /// false, encoded tokens are added as synonyms. Default is true. + /// + internal PhoneticTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, PhoneticEncoder? encoder, bool? replaceOriginalTokens) : base(odataType, name, serializedAdditionalRawData) + { + Encoder = encoder; + ReplaceOriginalTokens = replaceOriginalTokens; + } + + /// Initializes a new instance of for deserialization. + internal PhoneticTokenFilter() + { + } + + /// The phonetic encoder to use. Default is "metaphone". + public PhoneticEncoder? Encoder { get; set; } + /// + /// A value indicating whether encoded tokens should replace original tokens. If + /// false, encoded tokens are added as synonyms. Default is true. + /// + public bool? ReplaceOriginalTokens { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkill.Serialization.cs new file mode 100644 index 000000000000..6da18fec69df --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkill.Serialization.cs @@ -0,0 +1,279 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class PiiDetectionSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PiiDetectionSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode); + } + if (Optional.IsDefined(MinimumPrecision)) + { + writer.WritePropertyName("minimumPrecision"u8); + writer.WriteNumberValue(MinimumPrecision.Value); + } + if (Optional.IsDefined(MaskingMode)) + { + writer.WritePropertyName("maskingMode"u8); + writer.WriteStringValue(MaskingMode.Value.ToString()); + } + if (Optional.IsDefined(Mask)) + { + writer.WritePropertyName("maskingCharacter"u8); + writer.WriteStringValue(Mask); + } + if (Optional.IsDefined(ModelVersion)) + { + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + } + if (Optional.IsCollectionDefined(PiiCategories)) + { + writer.WritePropertyName("piiCategories"u8); + writer.WriteStartArray(); + foreach (var item in PiiCategories) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Domain)) + { + writer.WritePropertyName("domain"u8); + writer.WriteStringValue(Domain); + } + } + + Search.Documents.Indexes.Models.PiiDetectionSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PiiDetectionSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.PiiDetectionSkill.DeserializePiiDetectionSkill(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.PiiDetectionSkill DeserializePiiDetectionSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string defaultLanguageCode = default; + double? minimumPrecision = default; + Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode? 
maskingMode = default; + string maskingCharacter = default; + string modelVersion = default; + IList piiCategories = default; + string domain = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + defaultLanguageCode = property.Value.GetString(); + continue; + } + if (property.NameEquals("minimumPrecision"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minimumPrecision = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("maskingMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maskingMode = new Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("maskingCharacter"u8)) + { + maskingCharacter = property.Value.GetString(); + continue; + } + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("piiCategories"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + piiCategories = array; + continue; + } + if (property.NameEquals("domain"u8)) + { + domain = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.PiiDetectionSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + minimumPrecision, + maskingMode, + maskingCharacter, + modelVersion, + piiCategories ?? new ChangeTrackingList(), + domain); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PiiDetectionSkill)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.PiiDetectionSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.PiiDetectionSkill.DeserializePiiDetectionSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.PiiDetectionSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.PiiDetectionSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.PiiDetectionSkill.DeserializePiiDetectionSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkill.cs new file mode 100644 index 000000000000..345fd12ecc9b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkill.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Using the Text Analytics API, extracts personal information from an input text + /// and gives you the option of masking it. + /// + public partial class PiiDetectionSkill : Search.Documents.SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public PiiDetectionSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.PIIDetectionSkill"; + PiiCategories = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. 
+ /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. Default is `en`. + /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + /// + /// A parameter that provides various ways to mask the personal information + /// detected in the input text. Default is 'none'. + /// + /// + /// The character used to mask the text if the maskingMode parameter is set to + /// replace. Default is '*'. + /// + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + /// A list of PII entity categories that should be extracted and masked. + /// + /// If specified, will set the PII domain to include only a subset of the entity + /// categories. Possible values include: 'phi', 'none'. Default is 'none'. + /// + internal PiiDetectionSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string defaultLanguageCode, double? minimumPrecision, Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode? maskingMode, string mask, string modelVersion, IList piiCategories, string domain) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + MinimumPrecision = minimumPrecision; + MaskingMode = maskingMode; + Mask = mask; + ModelVersion = modelVersion; + PiiCategories = piiCategories; + Domain = domain; + } + + /// Initializes a new instance of for deserialization. + internal PiiDetectionSkill() + { + } + /// + /// A value between 0 and 1 that be used to only include entities whose confidence + /// score is greater than the value specified. If not set (default), or if + /// explicitly set to null, all entities will be included. + /// + public double? MinimumPrecision { get; set; } + /// + /// A parameter that provides various ways to mask the personal information + /// detected in the input text. Default is 'none'. + /// + public Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode? MaskingMode { get; set; } + /// + /// The character used to mask the text if the maskingMode parameter is set to + /// replace. Default is '*'. + /// + public string Mask { get; set; } + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + public string ModelVersion { get; set; } + /// A list of PII entity categories that should be extracted and masked. 
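A short sketch of how the PiiDetectionSkill above is typically consumed when building a skillset: it masks detected PII in /document/content and projects the piiEntities and maskedText outputs. The skillset name, field paths, and masking settings are illustrative, and SearchIndexerSkillset / SearchIndexerClient are assumed to keep their current public shape.

using System;
using System.Collections.Generic;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

string endpoint = "https://<service-name>.search.windows.net";
string key = "<admin-api-key>";

var piiSkill = new PiiDetectionSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[]
    {
        new OutputFieldMappingEntry("piiEntities") { TargetName = "pii" },
        new OutputFieldMappingEntry("maskedText") { TargetName = "maskedContent" }
    })
{
    DefaultLanguageCode = "en",
    MinimumPrecision = 0.5,                              // drop low-confidence entities
    MaskingMode = PiiDetectionSkillMaskingMode.Replace,  // entities replaced in maskedText
    Mask = "*"                                           // serialized as maskingCharacter
};

var skillset = new SearchIndexerSkillset("pii-skillset", new List<SearchIndexerSkill> { piiSkill });
new SearchIndexerClient(new Uri(endpoint), new AzureKeyCredential(key)).CreateOrUpdateSkillset(skillset);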
+ public IList PiiCategories { get; } + /// + /// If specified, will set the PII domain to include only a subset of the entity + /// categories. Possible values include: 'phi', 'none'. Default is 'none'. + /// + public string Domain { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkillMaskingMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkillMaskingMode.cs new file mode 100644 index 000000000000..5ae743eb6172 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/PiiDetectionSkillMaskingMode.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// A string indicating what maskingMode to use to mask the personal information + /// detected in the input text. + /// + public readonly partial struct PiiDetectionSkillMaskingMode : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public PiiDetectionSkillMaskingMode(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NoneValue = "none"; + private const string ReplaceValue = "replace"; + + /// No masking occurs and the maskedText output will not be returned. + public static Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode None { get; } = new Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode(NoneValue); + /// + /// Replaces the detected entities with the character given in the maskingCharacter + /// parameter. The character will be repeated to the length of the detected entity + /// so that the offsets will correctly correspond to both the input text as well as + /// the output maskedText. + /// + public static Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode Replace { get; } = new Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode(ReplaceValue); + /// Determines if two values are the same. + public static bool operator ==(Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode left, Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode left, Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode(string value) => new Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode other && Equals(other); + /// + public bool Equals(Search.Documents.Indexes.Models.PiiDetectionSkillMaskingMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerResult.Serialization.cs new file mode 100644 index 000000000000..605e6486a17b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerResult.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class QueryAnswerResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryAnswerResult)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Score)) + { + writer.WritePropertyName("score"u8); + writer.WriteNumberValue(Score.Value); + } + if (Optional.IsDefined(Key)) + { + writer.WritePropertyName("key"u8); + writer.WriteStringValue(Key); + } + if (Optional.IsDefined(Text)) + { + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + } + if (Optional.IsDefined(Highlights)) + { + writer.WritePropertyName("highlights"u8); + writer.WriteStringValue(Highlights); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + QueryAnswerResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryAnswerResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeQueryAnswerResult(document.RootElement, options); + } + + internal static QueryAnswerResult DeserializeQueryAnswerResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double? 
score = default; + string key = default; + string text = default; + string highlights = default; + IReadOnlyDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("score"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + score = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("text"u8)) + { + text = property.Value.GetString(); + continue; + } + if (property.NameEquals("highlights"u8)) + { + highlights = property.Value.GetString(); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new QueryAnswerResult(score, key, text, highlights, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(QueryAnswerResult)} does not support writing '{options.Format}' format."); + } + } + + QueryAnswerResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryAnswerResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(QueryAnswerResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static QueryAnswerResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryAnswerResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerResult.cs new file mode 100644 index 000000000000..70d594b527be --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerResult.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// An answer is a text passage extracted from the contents of the most relevant + /// documents that matched the query. Answers are extracted from the top search + /// results. Answer candidates are scored and the top answers are selected. + /// + public partial class QueryAnswerResult + { + /// Initializes a new instance of . 
+ internal QueryAnswerResult() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// + /// The score value represents how relevant the answer is to the query relative to + /// other answers returned for the query. + /// + /// The key of the document the answer was extracted from. + /// The text passage extracted from the document contents as the answer. + /// + /// Same text passage as in the Text property with highlighted text phrases most + /// relevant to the query. + /// + /// Additional Properties. + internal QueryAnswerResult(double? score, string key, string text, string highlights, IReadOnlyDictionary additionalProperties) + { + Score = score; + Key = key; + Text = text; + Highlights = highlights; + AdditionalProperties = additionalProperties; + } + + /// + /// The score value represents how relevant the answer is to the query relative to + /// other answers returned for the query. + /// + public double? Score { get; } + /// The key of the document the answer was extracted from. + public string Key { get; } + /// The text passage extracted from the document contents as the answer. + public string Text { get; } + /// + /// Same text passage as in the Text property with highlighted text phrases most + /// relevant to the query. + /// + public string Highlights { get; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IReadOnlyDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerType.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerType.cs new file mode 100644 index 000000000000..024d98574cbd --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryAnswerType.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// This parameter is only valid if the query type is `semantic`. If set, the query + /// returns answers extracted from key passages in the highest ranked documents. + /// The number of answers returned can be configured by appending the pipe + /// character `|` followed by the `count-` option after the + /// answers parameter value, such as `extractive|count-3`. Default count is 1. The + /// confidence threshold can be configured by appending the pipe character `|` + /// followed by the `threshold-` option after the answers + /// parameter value, such as `extractive|threshold-0.9`. Default threshold is 0.7. + /// The maximum character length of answers can be configured by appending the pipe + /// character '|' followed by the 'count-', + /// such as 'extractive|maxcharlength-600'. + /// + public readonly partial struct QueryAnswerType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. 
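The QueryAnswerType summary above describes the raw answers=extractive|count-N|threshold-X wire syntax; in the client library those options are normally set through the QueryAnswer convenience type rather than hand-built strings. A minimal sketch, assuming the existing SemanticSearchOptions/QueryAnswer shape (Count, Threshold) and an index with an illustrative semantic configuration named "my-semantic-config".

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

string endpoint = "https://<service-name>.search.windows.net";
string key = "<query-api-key>";
var searchClient = new SearchClient(new Uri(endpoint), "hotels", new AzureKeyCredential(key));

// Equivalent to answers=extractive|count-3|threshold-0.8 on the wire.
var options = new SearchOptions
{
    QueryType = SearchQueryType.Semantic,
    SemanticSearch = new SemanticSearchOptions
    {
        SemanticConfigurationName = "my-semantic-config",
        QueryAnswer = new QueryAnswer(QueryAnswerType.Extractive) { Count = 3, Threshold = 0.8 }
    }
};

SearchResults<SearchDocument> results = searchClient.Search<SearchDocument>("walkable hotel near the beach", options);
foreach (QueryAnswerResult answer in results.SemanticSearch.Answers ?? Array.Empty<QueryAnswerResult>())
{
    Console.WriteLine($"{answer.Score:F2}: {answer.Highlights ?? answer.Text}");
}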
+ public QueryAnswerType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NoneValue = "none"; + private const string ExtractiveValue = "extractive"; + + /// Do not return answers for the query. + public static QueryAnswerType None { get; } = new QueryAnswerType(NoneValue); + /// + /// Extracts answer candidates from the contents of the documents returned in + /// response to a query expressed as a question in natural language. + /// + public static QueryAnswerType Extractive { get; } = new QueryAnswerType(ExtractiveValue); + /// Determines if two values are the same. + public static bool operator ==(QueryAnswerType left, QueryAnswerType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(QueryAnswerType left, QueryAnswerType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator QueryAnswerType(string value) => new QueryAnswerType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is QueryAnswerType other && Equals(other); + /// + public bool Equals(QueryAnswerType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionResult.Serialization.cs new file mode 100644 index 000000000000..71300ca7425a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionResult.Serialization.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class QueryCaptionResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryCaptionResult)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Text)) + { + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + } + if (Optional.IsDefined(Highlights)) + { + writer.WritePropertyName("highlights"u8); + writer.WriteStringValue(Highlights); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + QueryCaptionResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryCaptionResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeQueryCaptionResult(document.RootElement, options); + } + + internal static QueryCaptionResult DeserializeQueryCaptionResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string text = default; + string highlights = default; + IReadOnlyDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + text = property.Value.GetString(); + continue; + } + if (property.NameEquals("highlights"u8)) + { + highlights = property.Value.GetString(); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new QueryCaptionResult(text, highlights, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(QueryCaptionResult)} does not support writing '{options.Format}' format."); + } + } + + QueryCaptionResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryCaptionResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(QueryCaptionResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static QueryCaptionResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryCaptionResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionResult.cs new file mode 100644 index 000000000000..0076b7e64301 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionResult.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Captions are the most representative passages from the document relatively to + /// the search query. They are often used as document summary. Captions are only + /// returned for queries of type `semantic`. + /// + public partial class QueryCaptionResult + { + /// Initializes a new instance of . + internal QueryCaptionResult() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// + /// A representative text passage extracted from the document most relevant to the + /// search query. + /// + /// + /// Same text passage as in the Text property with highlighted phrases most + /// relevant to the query. + /// + /// Additional Properties. + internal QueryCaptionResult(string text, string highlights, IReadOnlyDictionary additionalProperties) + { + Text = text; + Highlights = highlights; + AdditionalProperties = additionalProperties; + } + + /// + /// A representative text passage extracted from the document most relevant to the + /// search query. + /// + public string Text { get; } + /// + /// Same text passage as in the Text property with highlighted phrases most + /// relevant to the query. + /// + public string Highlights { get; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IReadOnlyDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionType.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionType.cs new file mode 100644 index 000000000000..326fe00b8887 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryCaptionType.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// This parameter is only valid if the query type is `semantic`. 
If set, the query + /// returns captions extracted from key passages in the highest ranked documents. + /// When Captions is set to `extractive`, highlighting is enabled by default, and + /// can be configured by appending the pipe character `|` followed by the + /// `highlight-` option, such as `extractive|highlight-true`. Defaults + /// to `None`. The maximum character length of captions can be configured by + /// appending the pipe character '|' followed by the 'count-', such as 'extractive|maxcharlength-600'. + /// + public readonly partial struct QueryCaptionType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public QueryCaptionType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NoneValue = "none"; + private const string ExtractiveValue = "extractive"; + + /// Do not return captions for the query. + public static QueryCaptionType None { get; } = new QueryCaptionType(NoneValue); + /// + /// Extracts captions from the matching documents that contain passages relevant to + /// the search query. + /// + public static QueryCaptionType Extractive { get; } = new QueryCaptionType(ExtractiveValue); + /// Determines if two values are the same. + public static bool operator ==(QueryCaptionType left, QueryCaptionType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(QueryCaptionType left, QueryCaptionType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator QueryCaptionType(string value) => new QueryCaptionType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is QueryCaptionType other && Equals(other); + /// + public bool Equals(QueryCaptionType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryDebugMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryDebugMode.cs similarity index 86% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/QueryDebugMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/QueryDebugMode.cs index 28bc67685efc..e236110b44dc 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryDebugMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryDebugMode.cs @@ -8,9 +8,13 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { - /// Enables a debugging tool that can be used to further explore your search results. You can enable multiple debug modes simultaneously by separating them with a | character, for example: semantic|queryRewrites. + /// + /// Enables a debugging tool that can be used to further explore your search + /// results. You can enable multiple debug modes simultaneously by separating them + /// with a | character, for example: semantic|queryRewrites. 
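Similarly, the QueryCaptionType options above map onto the QueryCaption convenience type; captions come back per result rather than per response. A minimal sketch under the same assumptions as the answers example (existing SemanticSearchOptions shape, illustrative configuration and index names).

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

string endpoint = "https://<service-name>.search.windows.net";
string key = "<query-api-key>";
var searchClient = new SearchClient(new Uri(endpoint), "hotels", new AzureKeyCredential(key));

// Equivalent to captions=extractive|highlight-true on the wire.
var options = new SearchOptions
{
    QueryType = SearchQueryType.Semantic,
    SemanticSearch = new SemanticSearchOptions
    {
        SemanticConfigurationName = "my-semantic-config",
        QueryCaption = new QueryCaption(QueryCaptionType.Extractive) { HighlightEnabled = true }
    }
};

foreach (SearchResult<SearchDocument> result in searchClient.Search<SearchDocument>("quiet room", options).Value.GetResults())
{
    foreach (QueryCaptionResult caption in result.SemanticSearch.Captions ?? Array.Empty<QueryCaptionResult>())
    {
        Console.WriteLine(caption.Highlights ?? caption.Text);
    }
}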
+ /// public readonly partial struct QueryDebugMode : IEquatable { private readonly string _value; @@ -34,7 +38,10 @@ public QueryDebugMode(string value) public static QueryDebugMode Semantic { get; } = new QueryDebugMode(SemanticValue); /// Allows the user to further explore their hybrid and vector query results. public static QueryDebugMode Vector { get; } = new QueryDebugMode(VectorValue); - /// Allows the user to explore the list of query rewrites generated for their search request. + /// + /// Allows the user to explore the list of query rewrites generated for their + /// search request. + /// public static QueryDebugMode QueryRewrites { get; } = new QueryDebugMode(QueryRewritesValue); /// Turn on all debug options. public static QueryDebugMode All { get; } = new QueryDebugMode(AllValue); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryLanguage.cs similarity index 99% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/QueryLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/QueryLanguage.cs index c34400175cc9..2cd9545566cd 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// The language of the query. public readonly partial struct QueryLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentRerankerInput.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentRerankerInput.Serialization.cs new file mode 100644 index 000000000000..7cda7f51c434 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentRerankerInput.Serialization.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class QueryResultDocumentRerankerInput : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryResultDocumentRerankerInput)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(Title)) + { + writer.WritePropertyName("title"u8); + writer.WriteStringValue(Title); + } + if (options.Format != "W" && Optional.IsDefined(Content)) + { + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + } + if (options.Format != "W" && Optional.IsDefined(Keywords)) + { + writer.WritePropertyName("keywords"u8); + writer.WriteStringValue(Keywords); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + QueryResultDocumentRerankerInput IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryResultDocumentRerankerInput)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeQueryResultDocumentRerankerInput(document.RootElement, options); + } + + internal static QueryResultDocumentRerankerInput DeserializeQueryResultDocumentRerankerInput(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string title = default; + string content = default; + string keywords = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("title"u8)) + { + title = property.Value.GetString(); + continue; + } + if (property.NameEquals("content"u8)) + { + content = property.Value.GetString(); + continue; + } + if (property.NameEquals("keywords"u8)) + { + keywords = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new QueryResultDocumentRerankerInput(title, content, keywords, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(QueryResultDocumentRerankerInput)} does not support writing '{options.Format}' format."); + } + } + + QueryResultDocumentRerankerInput IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryResultDocumentRerankerInput(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(QueryResultDocumentRerankerInput)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static QueryResultDocumentRerankerInput FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryResultDocumentRerankerInput(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentRerankerInput.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentRerankerInput.cs new file mode 100644 index 000000000000..4a15d7c144e8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentRerankerInput.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// The raw concatenated strings that were sent to the semantic enrichment process. + public partial class QueryResultDocumentRerankerInput + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal QueryResultDocumentRerankerInput() + { + } + + /// Initializes a new instance of . + /// The raw string for the title field that was used for semantic enrichment. + /// + /// The raw concatenated strings for the content fields that were used for semantic + /// enrichment. + /// + /// + /// The raw concatenated strings for the keyword fields that were used for semantic + /// enrichment. + /// + /// Keeps track of any properties unknown to the library. + internal QueryResultDocumentRerankerInput(string title, string content, string keywords, IDictionary serializedAdditionalRawData) + { + Title = title; + Content = content; + Keywords = keywords; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The raw string for the title field that was used for semantic enrichment. 
+ public string Title { get; } + /// + /// The raw concatenated strings for the content fields that were used for semantic + /// enrichment. + /// + public string Content { get; } + /// + /// The raw concatenated strings for the keyword fields that were used for semantic + /// enrichment. + /// + public string Keywords { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSemanticField.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSemanticField.Serialization.cs new file mode 100644 index 000000000000..acd70fe24ae1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSemanticField.Serialization.cs @@ -0,0 +1,160 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class QueryResultDocumentSemanticField : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryResultDocumentSemanticField)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(Name)) + { + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + } + if (options.Format != "W" && Optional.IsDefined(State)) + { + writer.WritePropertyName("state"u8); + writer.WriteStringValue(State.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + QueryResultDocumentSemanticField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryResultDocumentSemanticField)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeQueryResultDocumentSemanticField(document.RootElement, options); + } + + internal static QueryResultDocumentSemanticField DeserializeQueryResultDocumentSemanticField(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + SemanticFieldState? 
state = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("state"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + state = new SemanticFieldState(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new QueryResultDocumentSemanticField(name, state, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(QueryResultDocumentSemanticField)} does not support writing '{options.Format}' format."); + } + } + + QueryResultDocumentSemanticField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryResultDocumentSemanticField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(QueryResultDocumentSemanticField)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static QueryResultDocumentSemanticField FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryResultDocumentSemanticField(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSemanticField.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSemanticField.cs new file mode 100644 index 000000000000..0d4fdeca9a0e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSemanticField.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Description of fields that were sent to the semantic enrichment process, as + /// well as how they were used + /// + public partial class QueryResultDocumentSemanticField + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal QueryResultDocumentSemanticField() + { + } + + /// Initializes a new instance of . + /// The name of the field that was sent to the semantic enrichment process. + /// + /// The way the field was used for the semantic enrichment process (fully used, + /// partially used, or unused) + /// + /// Keeps track of any properties unknown to the library. + internal QueryResultDocumentSemanticField(string name, SemanticFieldState? state, IDictionary serializedAdditionalRawData) + { + Name = name; + State = state; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The name of the field that was sent to the semantic enrichment process. + public string Name { get; } + /// + /// The way the field was used for the semantic enrichment process (fully used, + /// partially used, or unused) + /// + public SemanticFieldState? State { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSubscores.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSubscores.Serialization.cs new file mode 100644 index 000000000000..7e0f1397dee1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSubscores.Serialization.cs @@ -0,0 +1,212 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class QueryResultDocumentSubscores : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryResultDocumentSubscores)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(Text)) + { + writer.WritePropertyName("text"u8); + writer.WriteObjectValue(Text, options); + } + if (options.Format != "W" && Optional.IsCollectionDefined(Vectors)) + { + writer.WritePropertyName("vectors"u8); + writer.WriteStartArray(); + foreach (var item in Vectors) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStartObject(); + foreach (var item0 in item) + { + writer.WritePropertyName(item0.Key); + writer.WriteObjectValue(item0.Value, options); + } + writer.WriteEndObject(); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && Optional.IsDefined(DocumentBoost)) + { + writer.WritePropertyName("documentBoost"u8); + writer.WriteNumberValue(DocumentBoost.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + QueryResultDocumentSubscores IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryResultDocumentSubscores)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeQueryResultDocumentSubscores(document.RootElement, options); + } + + internal static QueryResultDocumentSubscores DeserializeQueryResultDocumentSubscores(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TextResult text = default; + IReadOnlyList> vectors = default; + double? 
documentBoost = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + text = TextResult.DeserializeTextResult(property.Value, options); + continue; + } + if (property.NameEquals("vectors"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List> array = new List>(); + foreach (var item in property.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + Dictionary dictionary = new Dictionary(); + foreach (var property0 in item.EnumerateObject()) + { + dictionary.Add(property0.Name, SingleVectorFieldResult.DeserializeSingleVectorFieldResult(property0.Value, options)); + } + array.Add(dictionary); + } + } + vectors = array; + continue; + } + if (property.NameEquals("documentBoost"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + documentBoost = property.Value.GetDouble(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new QueryResultDocumentSubscores(text, vectors ?? new ChangeTrackingList>(), documentBoost, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(QueryResultDocumentSubscores)} does not support writing '{options.Format}' format."); + } + } + + QueryResultDocumentSubscores IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryResultDocumentSubscores(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(QueryResultDocumentSubscores)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static QueryResultDocumentSubscores FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryResultDocumentSubscores(document.RootElement); + } + + /// Convert into a . 
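The deserializer above accepts a payload of the form { "text": ..., "vectors": [...], "documentBoost": ... }, where each non-null "vectors" entry is a JSON object keyed by vector field name. A minimal sketch of exercising it follows; because DeserializeQueryResultDocumentSubscores is internal, this only compiles inside the library or a test project with InternalsVisibleTo, and the sample payload and expected output are illustrative only.

using System;
using System.Text.Json;
using Azure.Search.Documents;

// Illustrative payload: one null vector entry plus a document boost.
string json = "{\"vectors\":[null],\"documentBoost\":1.5}";

using JsonDocument document = JsonDocument.Parse(json);
QueryResultDocumentSubscores subscores =
    QueryResultDocumentSubscores.DeserializeQueryResultDocumentSubscores(document.RootElement);

Console.WriteLine(subscores.DocumentBoost); // 1.5
Console.WriteLine(subscores.Vectors.Count); // 1 -- null entries are preserved as null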
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSubscores.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSubscores.cs new file mode 100644 index 000000000000..7852875210bb --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryResultDocumentSubscores.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The breakdown of subscores between the text and vector query components of the + /// search query for this document. Each vector query is shown as a separate object + /// in the same order they were received. + /// + public partial class QueryResultDocumentSubscores + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal QueryResultDocumentSubscores() + { + Vectors = new ChangeTrackingList>(); + } + + /// Initializes a new instance of . + /// The BM25 or Classic score for the text portion of the query. + /// The vector similarity and @search.score values for each vector query. + /// The BM25 or Classic score for the text portion of the query. + /// Keeps track of any properties unknown to the library. + internal QueryResultDocumentSubscores(TextResult text, IReadOnlyList> vectors, double? documentBoost, IDictionary serializedAdditionalRawData) + { + Text = text; + Vectors = vectors; + DocumentBoost = documentBoost; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The BM25 or Classic score for the text portion of the query. + public TextResult Text { get; } + /// The vector similarity and @search.score values for each vector query. + public IReadOnlyList> Vectors { get; } + /// The BM25 or Classic score for the text portion of the query. + public double? DocumentBoost { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesDebugInfo.Serialization.cs new file mode 100644 index 000000000000..a23cd739978c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesDebugInfo.Serialization.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class QueryRewritesDebugInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryRewritesDebugInfo)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(Text)) + { + writer.WritePropertyName("text"u8); + writer.WriteObjectValue(Text, options); + } + if (options.Format != "W" && Optional.IsCollectionDefined(Vectors)) + { + writer.WritePropertyName("vectors"u8); + writer.WriteStartArray(); + foreach (var item in Vectors) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + QueryRewritesDebugInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryRewritesDebugInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeQueryRewritesDebugInfo(document.RootElement, options); + } + + internal static QueryRewritesDebugInfo DeserializeQueryRewritesDebugInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + QueryRewritesValuesDebugInfo text = default; + IReadOnlyList vectors = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + text = QueryRewritesValuesDebugInfo.DeserializeQueryRewritesValuesDebugInfo(property.Value, options); + continue; + } + if (property.NameEquals("vectors"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(QueryRewritesValuesDebugInfo.DeserializeQueryRewritesValuesDebugInfo(item, options)); + } + vectors = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new QueryRewritesDebugInfo(text, vectors ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(QueryRewritesDebugInfo)} does not support writing '{options.Format}' format."); + } + } + + QueryRewritesDebugInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryRewritesDebugInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(QueryRewritesDebugInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static QueryRewritesDebugInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryRewritesDebugInfo(document.RootElement); + } + + /// Convert into a . 
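A short, hypothetical consumption sketch for the model this file serializes. It touches only public members that appear in this diff (Text and Vectors here; InputQuery and Rewrites on QueryRewritesValuesDebugInfo further below); how a caller obtains the instance from a debug-enabled search response is not part of this excerpt.

using System;
using Azure.Search.Documents;

static void PrintQueryRewrites(QueryRewritesDebugInfo debug)
{
    if (debug.Text != null)
    {
        Console.WriteLine($"Text query input: {debug.Text.InputQuery}");
        foreach (string rewrite in debug.Text.Rewrites)
        {
            Console.WriteLine($"  rewrite: {rewrite}");
        }
    }

    // One entry per vectorizable text query.
    foreach (QueryRewritesValuesDebugInfo vector in debug.Vectors)
    {
        Console.WriteLine($"Vector query input: {vector.InputQuery} ({vector.Rewrites.Count} rewrites)");
    }
}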
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesDebugInfo.cs new file mode 100644 index 000000000000..73eb6fb7c223 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesDebugInfo.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains debugging information specific to query rewrites. + public partial class QueryRewritesDebugInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal QueryRewritesDebugInfo() + { + Vectors = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// List of query rewrites generated for the text query. + /// List of query rewrites generated for the vectorizable text queries. + /// Keeps track of any properties unknown to the library. + internal QueryRewritesDebugInfo(QueryRewritesValuesDebugInfo text, IReadOnlyList vectors, IDictionary serializedAdditionalRawData) + { + Text = text; + Vectors = vectors; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// List of query rewrites generated for the text query. + public QueryRewritesValuesDebugInfo Text { get; } + /// List of query rewrites generated for the vectorizable text queries. + public IReadOnlyList Vectors { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesType.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesType.cs similarity index 82% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesType.cs rename to sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesType.cs index 021ee136c23d..d0efccc4c909 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QueryRewritesType.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesType.cs @@ -8,9 +8,16 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { - /// This parameter is only valid if the query type is `semantic`. When QueryRewrites is set to `generative`, the query terms are sent to a generate model which will produce 10 (default) rewrites to help increase the recall of the request. The requested count can be configured by appending the pipe character `|` followed by the `count-<number of rewrites>` option, such as `generative|count-3`. Defaults to `None`. 
+ /// + /// This parameter is only valid if the query type is `semantic`. When + /// QueryRewrites is set to `generative`, the query terms are sent to a generate + /// model which will produce 10 (default) rewrites to help increase the recall of + /// the request. The requested count can be configured by appending the pipe + /// character `|` followed by the `count-` option, such as + /// `generative|count-3`. Defaults to `None`. + /// public readonly partial struct QueryRewritesType : IEquatable { private readonly string _value; diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesValuesDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesValuesDebugInfo.Serialization.cs new file mode 100644 index 000000000000..2c76b68ed495 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesValuesDebugInfo.Serialization.cs @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class QueryRewritesValuesDebugInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryRewritesValuesDebugInfo)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(InputQuery)) + { + writer.WritePropertyName("inputQuery"u8); + writer.WriteStringValue(InputQuery); + } + if (options.Format != "W" && Optional.IsCollectionDefined(Rewrites)) + { + writer.WritePropertyName("rewrites"u8); + writer.WriteStartArray(); + foreach (var item in Rewrites) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + QueryRewritesValuesDebugInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(QueryRewritesValuesDebugInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeQueryRewritesValuesDebugInfo(document.RootElement, options); + } + + internal static QueryRewritesValuesDebugInfo DeserializeQueryRewritesValuesDebugInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string inputQuery = default; + IReadOnlyList rewrites = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("inputQuery"u8)) + { + inputQuery = property.Value.GetString(); + continue; + } + if (property.NameEquals("rewrites"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + rewrites = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new QueryRewritesValuesDebugInfo(inputQuery, rewrites ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(QueryRewritesValuesDebugInfo)} does not support writing '{options.Format}' format."); + } + } + + QueryRewritesValuesDebugInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryRewritesValuesDebugInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(QueryRewritesValuesDebugInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static QueryRewritesValuesDebugInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeQueryRewritesValuesDebugInfo(document.RootElement); + } + + /// Convert into a . 
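Given the generative|count-&lt;n&gt; syntax described for QueryRewritesType above, a hedged usage sketch: the public string constructor is assumed from the generator's extensible-enum pattern (the same shape as RegexFlag and the other string-backed structs in this diff), and wiring the value into a search request is outside this excerpt.

using Azure.Search.Documents;

// "generative" alone requests the default of 10 rewrites; appending "|count-<n>" overrides the count.
QueryRewritesType generative = new QueryRewritesType("generative");
QueryRewritesType threeRewrites = new QueryRewritesType("generative|count-3");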
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesValuesDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesValuesDebugInfo.cs new file mode 100644 index 000000000000..d16de16d264f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/QueryRewritesValuesDebugInfo.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains debugging information specific to query rewrites. + public partial class QueryRewritesValuesDebugInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal QueryRewritesValuesDebugInfo() + { + Rewrites = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// The input text to the generative query rewriting model. There may be cases + /// where the user query and the input to the generative model are not identical. + /// + /// List of query rewrites. + /// Keeps track of any properties unknown to the library. + internal QueryRewritesValuesDebugInfo(string inputQuery, IReadOnlyList rewrites, IDictionary serializedAdditionalRawData) + { + InputQuery = inputQuery; + Rewrites = rewrites; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The input text to the generative query rewriting model. There may be cases + /// where the user query and the input to the generative model are not identical. + /// + public string InputQuery { get; } + /// List of query rewrites. + public IReadOnlyList Rewrites { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/QuerySpellerType.cs b/sdk/search/Azure.Search.Documents/src/Generated/QuerySpellerType.cs similarity index 91% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/QuerySpellerType.cs rename to sdk/search/Azure.Search.Documents/src/Generated/QuerySpellerType.cs index 6dd03945ccc4..1ad102c60ba1 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/QuerySpellerType.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/QuerySpellerType.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// Improve search recall by spell-correcting individual search query terms. public readonly partial struct QuerySpellerType : IEquatable @@ -27,7 +27,10 @@ public QuerySpellerType(string value) /// Speller not enabled. 
public static QuerySpellerType None { get; } = new QuerySpellerType(NoneValue); - /// Speller corrects individual query terms using a static lexicon for the language specified by the queryLanguage parameter. + /// + /// Speller corrects individual query terms using a static lexicon for the language + /// specified by the queryLanguage parameter. + /// public static QuerySpellerType Lexicon { get; } = new QuerySpellerType(LexiconValue); /// Determines if two values are the same. public static bool operator ==(QuerySpellerType left, QuerySpellerType right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/RegexFlag.cs b/sdk/search/Azure.Search.Documents/src/Generated/RegexFlag.cs new file mode 100644 index 000000000000..67a949d96c28 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/RegexFlag.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Defines flags that can be combined to control how regular expressions are used + /// in the pattern analyzer and pattern tokenizer. + /// + public readonly partial struct RegexFlag : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public RegexFlag(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string CanonEqValue = "CANON_EQ"; + private const string CaseInsensitiveValue = "CASE_INSENSITIVE"; + private const string CommentsValue = "COMMENTS"; + private const string DotAllValue = "DOTALL"; + private const string LiteralValue = "LITERAL"; + private const string MultilineValue = "MULTILINE"; + private const string UnicodeCaseValue = "UNICODE_CASE"; + private const string UnixLinesValue = "UNIX_LINES"; + + /// Enables canonical equivalence. + public static Search.Documents.Indexes.Models.RegexFlag CanonEq { get; } = new Search.Documents.Indexes.Models.RegexFlag(CanonEqValue); + /// Enables case-insensitive matching. + public static Search.Documents.Indexes.Models.RegexFlag CaseInsensitive { get; } = new Search.Documents.Indexes.Models.RegexFlag(CaseInsensitiveValue); + /// Permits whitespace and comments in the pattern. + public static Search.Documents.Indexes.Models.RegexFlag Comments { get; } = new Search.Documents.Indexes.Models.RegexFlag(CommentsValue); + /// Enables dotall mode. + public static Search.Documents.Indexes.Models.RegexFlag DotAll { get; } = new Search.Documents.Indexes.Models.RegexFlag(DotAllValue); + /// Enables literal parsing of the pattern. + public static Search.Documents.Indexes.Models.RegexFlag Literal { get; } = new Search.Documents.Indexes.Models.RegexFlag(LiteralValue); + /// Enables multiline mode. + public static Search.Documents.Indexes.Models.RegexFlag Multiline { get; } = new Search.Documents.Indexes.Models.RegexFlag(MultilineValue); + /// Enables Unicode-aware case folding. + public static Search.Documents.Indexes.Models.RegexFlag UnicodeCase { get; } = new Search.Documents.Indexes.Models.RegexFlag(UnicodeCaseValue); + /// Enables Unix lines mode. + public static Search.Documents.Indexes.Models.RegexFlag UnixLines { get; } = new Search.Documents.Indexes.Models.RegexFlag(UnixLinesValue); + /// Determines if two values are the same. 
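A small illustration of how RegexFlag behaves as a string-backed extensible enum, using the implicit string conversion and case-insensitive equality defined just below in this file. The pipe-joined value in the last lines is an assumption about how "combined" flags are expressed for the pattern analyzer and tokenizer; this diff does not state the combination syntax.

using System;
using Azure.Search.Documents.Indexes.Models;

RegexFlag flag = RegexFlag.CaseInsensitive;          // predefined value, backed by "CASE_INSENSITIVE"
RegexFlag fromString = "case_insensitive";           // implicit conversion from string
Console.WriteLine(flag == fromString);               // True: equality ignores case

RegexFlag combined = "CASE_INSENSITIVE|MULTILINE";   // assumed pipe-joined "combined" form
Console.WriteLine(combined);                         // prints the underlying string value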
+ public static bool operator ==(Search.Documents.Indexes.Models.RegexFlag left, Search.Documents.Indexes.Models.RegexFlag right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(Search.Documents.Indexes.Models.RegexFlag left, Search.Documents.Indexes.Models.RegexFlag right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator Search.Documents.Indexes.Models.RegexFlag(string value) => new Search.Documents.Indexes.Models.RegexFlag(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is Search.Documents.Indexes.Models.RegexFlag other && Equals(other); + /// + public bool Equals(Search.Documents.Indexes.Models.RegexFlag other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/RescoringOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/RescoringOptions.Serialization.cs new file mode 100644 index 000000000000..d519adac094d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/RescoringOptions.Serialization.cs @@ -0,0 +1,179 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class RescoringOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(RescoringOptions)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(EnableRescoring)) + { + writer.WritePropertyName("enableRescoring"u8); + writer.WriteBooleanValue(EnableRescoring.Value); + } + if (Optional.IsDefined(DefaultOversampling)) + { + writer.WritePropertyName("defaultOversampling"u8); + writer.WriteNumberValue(DefaultOversampling.Value); + } + if (Optional.IsDefined(RescoreStorageMethod)) + { + writer.WritePropertyName("rescoreStorageMethod"u8); + writer.WriteStringValue(RescoreStorageMethod.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + RescoringOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(RescoringOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeRescoringOptions(document.RootElement, options); + } + + internal static RescoringOptions DeserializeRescoringOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool? enableRescoring = default; + double? defaultOversampling = default; + VectorSearchCompressionRescoreStorageMethod? rescoreStorageMethod = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("enableRescoring"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableRescoring = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("defaultOversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultOversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("rescoreStorageMethod"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rescoreStorageMethod = new VectorSearchCompressionRescoreStorageMethod(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new RescoringOptions(enableRescoring, defaultOversampling, rescoreStorageMethod, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(RescoringOptions)} does not support writing '{options.Format}' format."); + } + } + + RescoringOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeRescoringOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(RescoringOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static RescoringOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeRescoringOptions(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/RescoringOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/RescoringOptions.cs new file mode 100644 index 000000000000..372ee90d3003 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/RescoringOptions.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains the options for rescoring. + public partial class RescoringOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public RescoringOptions() + { + } + + /// Initializes a new instance of . + /// + /// If set to true, after the initial search on the compressed vectors, the + /// similarity scores are recalculated using the full-precision vectors. This will + /// improve recall at the expense of latency. + /// + /// + /// Default oversampling factor. Oversampling retrieves a greater set of potential + /// documents to offset the resolution loss due to quantization. This increases the + /// set of results that will be rescored on full-precision vectors. Minimum value + /// is 1, meaning no oversampling (1x). 
This parameter can only be set when 'enableRescoring' + /// is true. Higher values improve recall at the expense of latency. + /// + /// Controls the storage method for original vectors. This setting is immutable. + /// Keeps track of any properties unknown to the library. + internal RescoringOptions(bool? enableRescoring, double? defaultOversampling, VectorSearchCompressionRescoreStorageMethod? rescoreStorageMethod, IDictionary serializedAdditionalRawData) + { + EnableRescoring = enableRescoring; + DefaultOversampling = defaultOversampling; + RescoreStorageMethod = rescoreStorageMethod; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// If set to true, after the initial search on the compressed vectors, the + /// similarity scores are recalculated using the full-precision vectors. This will + /// improve recall at the expense of latency. + /// + public bool? EnableRescoring { get; set; } + /// + /// Default oversampling factor. Oversampling retrieves a greater set of potential + /// documents to offset the resolution loss due to quantization. This increases the + /// set of results that will be rescored on full-precision vectors. Minimum value + /// is 1, meaning no oversampling (1x). This parameter can only be set when 'enableRescoring' + /// is true. Higher values improve recall at the expense of latency. + /// + public double? DefaultOversampling { get; set; } + /// Controls the storage method for original vectors. This setting is immutable. + public VectorSearchCompressionRescoreStorageMethod? RescoreStorageMethod { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ResetDocumentOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ResetDocumentOptions.Serialization.cs new file mode 100644 index 000000000000..d41a99dffcd4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ResetDocumentOptions.Serialization.cs @@ -0,0 +1,184 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + public partial class ResetDocumentOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResetDocumentOptions)} does not support writing '{format}' format."); + } + + if (Optional.IsCollectionDefined(DocumentKeys)) + { + writer.WritePropertyName("documentKeys"u8); + writer.WriteStartArray(); + foreach (var item in DocumentKeys) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(DataSourceDocumentIds)) + { + writer.WritePropertyName("datasourceDocumentIds"u8); + writer.WriteStartArray(); + foreach (var item in DataSourceDocumentIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ResetDocumentOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResetDocumentOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeResetDocumentOptions(document.RootElement, options); + } + + internal static ResetDocumentOptions DeserializeResetDocumentOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList documentKeys = default; + IList datasourceDocumentIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("documentKeys"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + documentKeys = array; + continue; + } + if (property.NameEquals("datasourceDocumentIds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + datasourceDocumentIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ResetDocumentOptions(documentKeys ?? new ChangeTrackingList(), datasourceDocumentIds ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ResetDocumentOptions)} does not support writing '{options.Format}' format."); + } + } + + ResetDocumentOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeResetDocumentOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ResetDocumentOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ResetDocumentOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeResetDocumentOptions(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ResetDocumentOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/ResetDocumentOptions.cs new file mode 100644 index 000000000000..8073e9b9d4ce --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ResetDocumentOptions.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Models +{ + /// The type of the keysOrIds. + public partial class ResetDocumentOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public ResetDocumentOptions() + { + DocumentKeys = new ChangeTrackingList(); + DataSourceDocumentIds = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// document keys to be reset. + /// datasource document identifiers to be reset. + /// Keeps track of any properties unknown to the library. 
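A brief sketch of building the payload this model serializes. It uses only the public parameterless constructor and the DocumentKeys collection shown in this file; passing the options to an indexer reset-documents operation is not covered by this excerpt, and the key values are placeholders.

using Azure.Search.Documents.Models;

var resetOptions = new ResetDocumentOptions();
resetOptions.DocumentKeys.Add("hotel-123"); // serialized under "documentKeys"
resetOptions.DocumentKeys.Add("hotel-456");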
+ internal ResetDocumentOptions(IList documentKeys, IList dataSourceDocumentIds, IDictionary serializedAdditionalRawData) + { + DocumentKeys = documentKeys; + DataSourceDocumentIds = dataSourceDocumentIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// document keys to be reset. + public IList DocumentKeys { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ResetSkillsOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ResetSkillsOptions.Serialization.cs new file mode 100644 index 000000000000..bb01559b0124 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ResetSkillsOptions.Serialization.cs @@ -0,0 +1,159 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + public partial class ResetSkillsOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResetSkillsOptions)} does not support writing '{format}' format."); + } + + if (Optional.IsCollectionDefined(SkillNameList)) + { + writer.WritePropertyName("skillNames"u8); + writer.WriteStartArray(); + foreach (var item in SkillNameList) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ResetSkillsOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResetSkillsOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeResetSkillsOptions(document.RootElement, options); + } + + internal static ResetSkillsOptions DeserializeResetSkillsOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList skillNames = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("skillNames"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + skillNames = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ResetSkillsOptions(skillNames ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ResetSkillsOptions)} does not support writing '{options.Format}' format."); + } + } + + ResetSkillsOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeResetSkillsOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ResetSkillsOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ResetSkillsOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeResetSkillsOptions(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ResetSkillsOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/ResetSkillsOptions.cs new file mode 100644 index 000000000000..80f8b307b4b7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ResetSkillsOptions.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Models +{ + /// The type of the skill names. + public partial class ResetSkillsOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public ResetSkillsOptions() + { + SkillNameList = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// the names of skills to be reset. + /// Keeps track of any properties unknown to the library. + internal ResetSkillsOptions(IList skillNameList, IDictionary serializedAdditionalRawData) + { + SkillNameList = skillNameList; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// the names of skills to be reset. + public IList SkillNameList { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationCompression.Serialization.cs new file mode 100644 index 000000000000..7d01b8d4d530 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationCompression.Serialization.cs @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ScalarQuantizationCompression : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScalarQuantizationCompression)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Parameters)) + { + writer.WritePropertyName("scalarQuantizationParameters"u8); + writer.WriteObjectValue(Parameters, options); + } + } + + ScalarQuantizationCompression IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScalarQuantizationCompression)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeScalarQuantizationCompression(document.RootElement, options); + } + + internal static ScalarQuantizationCompression DeserializeScalarQuantizationCompression(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ScalarQuantizationParameters scalarQuantizationParameters = default; + string name = default; + bool? rerankWithOriginalVectors = default; + double? defaultOversampling = default; + RescoringOptions rescoringOptions = default; + int? truncationDimension = default; + VectorSearchCompressionKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("scalarQuantizationParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + scalarQuantizationParameters = ScalarQuantizationParameters.DeserializeScalarQuantizationParameters(property.Value, options); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("rerankWithOriginalVectors"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rerankWithOriginalVectors = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("defaultOversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultOversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("rescoringOptions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rescoringOptions = RescoringOptions.DeserializeRescoringOptions(property.Value, options); + continue; + } + if (property.NameEquals("truncationDimension"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + truncationDimension = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchCompressionKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ScalarQuantizationCompression( + name, + rerankWithOriginalVectors, + defaultOversampling, + rescoringOptions, + truncationDimension, + kind, + serializedAdditionalRawData, + scalarQuantizationParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ScalarQuantizationCompression)} does not support writing '{options.Format}' format."); + } + } + + ScalarQuantizationCompression IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScalarQuantizationCompression(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ScalarQuantizationCompression)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ScalarQuantizationCompression FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScalarQuantizationCompression(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationCompression.cs new file mode 100644 index 000000000000..ee7502bacfc3 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationCompression.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains configuration options specific to the scalar quantization compression + /// method used during indexing and querying. + /// + public partial class ScalarQuantizationCompression : VectorSearchCompression + { + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// is null. + public ScalarQuantizationCompression(string compressionName) : base(compressionName) + { + Argument.AssertNotNull(compressionName, nameof(compressionName)); + + Kind = VectorSearchCompressionKind.ScalarQuantization; + } + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// + /// If set to true, once the ordered set of results calculated using compressed + /// vectors are obtained, they will be reranked again by recalculating the + /// full-precision similarity scores. This will improve recall at the expense of + /// latency. + /// + /// + /// Default oversampling factor. Oversampling will internally request more + /// documents (specified by this multiplier) in the initial search. This increases + /// the set of results that will be reranked using recomputed similarity scores + /// from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). + /// This parameter can only be set when rerankWithOriginalVectors is true. Higher + /// values improve recall at the expense of latency. + /// + /// Contains the options for rescoring. + /// + /// The number of dimensions to truncate the vectors to. Truncating the vectors + /// reduces the size of the vectors and the amount of data that needs to be + /// transferred during search. This can save storage cost and improve search + /// performance at the expense of recall. 
It should be only used for embeddings + /// trained with Matryoshka Representation Learning (MRL) such as OpenAI + /// text-embedding-3-large (small). The default value is null, which means no + /// truncation. + /// + /// Type of VectorSearchCompression. + /// Keeps track of any properties unknown to the library. + /// Contains the parameters specific to Scalar Quantization. + internal ScalarQuantizationCompression(string compressionName, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? truncationDimension, VectorSearchCompressionKind kind, IDictionary serializedAdditionalRawData, ScalarQuantizationParameters parameters) : base(compressionName, rerankWithOriginalVectors, defaultOversampling, rescoringOptions, truncationDimension, kind, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal ScalarQuantizationCompression() + { + } + + /// Contains the parameters specific to Scalar Quantization. + public ScalarQuantizationParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationParameters.Serialization.cs new file mode 100644 index 000000000000..a6d83802b672 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationParameters.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ScalarQuantizationParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScalarQuantizationParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(QuantizedDataType)) + { + writer.WritePropertyName("quantizedDataType"u8); + writer.WriteStringValue(QuantizedDataType.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ScalarQuantizationParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScalarQuantizationParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeScalarQuantizationParameters(document.RootElement, options); + } + + internal static ScalarQuantizationParameters DeserializeScalarQuantizationParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + VectorSearchCompressionTarget? quantizedDataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("quantizedDataType"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + quantizedDataType = new VectorSearchCompressionTarget(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ScalarQuantizationParameters(quantizedDataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ScalarQuantizationParameters)} does not support writing '{options.Format}' format."); + } + } + + ScalarQuantizationParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScalarQuantizationParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ScalarQuantizationParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ScalarQuantizationParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScalarQuantizationParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationParameters.cs new file mode 100644 index 000000000000..ef8b3e3d5231 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScalarQuantizationParameters.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains the parameters specific to Scalar Quantization. + public partial class ScalarQuantizationParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public ScalarQuantizationParameters() + { + } + + /// Initializes a new instance of . + /// The quantized data type of compressed vector values. + /// Keeps track of any properties unknown to the library. + internal ScalarQuantizationParameters(VectorSearchCompressionTarget? quantizedDataType, IDictionary serializedAdditionalRawData) + { + QuantizedDataType = quantizedDataType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The quantized data type of compressed vector values. + public VectorSearchCompressionTarget? QuantizedDataType { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunction.Serialization.cs new file mode 100644 index 000000000000..110ab5399b60 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunction.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownScoringFunction))] + public partial class ScoringFunction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScoringFunction)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("fieldName"u8); + writer.WriteStringValue(FieldName); + writer.WritePropertyName("boost"u8); + writer.WriteNumberValue(Boost); + if (Optional.IsDefined(Interpolation)) + { + writer.WritePropertyName("interpolation"u8); + writer.WriteStringValue(Interpolation.Value.ToString()); + } + writer.WritePropertyName("type"u8); + writer.WriteStringValue(Type); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ScoringFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScoringFunction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeScoringFunction(document.RootElement, options); + } + + internal static ScoringFunction DeserializeScoringFunction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "distance": return DistanceScoringFunction.DeserializeDistanceScoringFunction(element, options); + case "freshness": return FreshnessScoringFunction.DeserializeFreshnessScoringFunction(element, options); + case "magnitude": return MagnitudeScoringFunction.DeserializeMagnitudeScoringFunction(element, options); + case "tag": return TagScoringFunction.DeserializeTagScoringFunction(element, options); + } + } + return UnknownScoringFunction.DeserializeUnknownScoringFunction(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ScoringFunction)} does not support writing '{options.Format}' format."); + } + } + + ScoringFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScoringFunction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ScoringFunction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. 
+ /// The response to deserialize the model from. + internal static ScoringFunction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScoringFunction(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunction.cs new file mode 100644 index 000000000000..d7a06ea1c1e8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunction.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for functions that can modify document scores during ranking. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public abstract partial class ScoringFunction + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// is null. + protected ScoringFunction(string fieldName, double boost) + { + Argument.AssertNotNull(fieldName, nameof(fieldName)); + + FieldName = fieldName; + Boost = boost; + } + + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// + /// A value indicating how boosting will be interpolated across document scores; + /// defaults to "Linear". + /// + /// Type of ScoringFunction. + /// Keeps track of any properties unknown to the library. + internal ScoringFunction(string fieldName, double boost, ScoringFunctionInterpolation? interpolation, string type, IDictionary serializedAdditionalRawData) + { + FieldName = fieldName; + Boost = boost; + Interpolation = interpolation; + Type = type; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ScoringFunction() + { + } + + /// The name of the field used as input to the scoring function. + public string FieldName { get; set; } + /// A multiplier for the raw score. 
Must be a positive number not equal to 1.0. + public double Boost { get; set; } + /// + /// A value indicating how boosting will be interpolated across document scores; + /// defaults to "Linear". + /// + public ScoringFunctionInterpolation? Interpolation { get; set; } + /// Type of ScoringFunction. + internal string Type { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunctionAggregation.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunctionAggregation.cs new file mode 100644 index 000000000000..4ca406abce9d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunctionAggregation.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// Defines the aggregation function used to combine the results of all the scoring + /// functions in a scoring profile. + /// + public readonly partial struct ScoringFunctionAggregation : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public ScoringFunctionAggregation(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string SumValue = "sum"; + private const string AverageValue = "average"; + private const string MinimumValue = "minimum"; + private const string MaximumValue = "maximum"; + private const string FirstMatchingValue = "firstMatching"; + + /// Boost scores by the sum of all scoring function results. + public static ScoringFunctionAggregation Sum { get; } = new ScoringFunctionAggregation(SumValue); + /// Boost scores by the average of all scoring function results. + public static ScoringFunctionAggregation Average { get; } = new ScoringFunctionAggregation(AverageValue); + /// Boost scores by the minimum of all scoring function results. + public static ScoringFunctionAggregation Minimum { get; } = new ScoringFunctionAggregation(MinimumValue); + /// Boost scores by the maximum of all scoring function results. + public static ScoringFunctionAggregation Maximum { get; } = new ScoringFunctionAggregation(MaximumValue); + /// Boost scores using the first applicable scoring function in the scoring profile. + public static ScoringFunctionAggregation FirstMatching { get; } = new ScoringFunctionAggregation(FirstMatchingValue); + /// Determines if two values are the same. + public static bool operator ==(ScoringFunctionAggregation left, ScoringFunctionAggregation right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(ScoringFunctionAggregation left, ScoringFunctionAggregation right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator ScoringFunctionAggregation(string value) => new ScoringFunctionAggregation(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ScoringFunctionAggregation other && Equals(other); + /// + public bool Equals(ScoringFunctionAggregation other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunctionInterpolation.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunctionInterpolation.cs new file mode 100644 index 000000000000..564b04a4d110 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScoringFunctionInterpolation.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// Defines the function used to interpolate score boosting across a range of + /// documents. + /// + public readonly partial struct ScoringFunctionInterpolation : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public ScoringFunctionInterpolation(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string LinearValue = "linear"; + private const string ConstantValue = "constant"; + private const string QuadraticValue = "quadratic"; + private const string LogarithmicValue = "logarithmic"; + + /// + /// Boosts scores by a linearly decreasing amount. This is the default + /// interpolation for scoring functions. + /// + public static ScoringFunctionInterpolation Linear { get; } = new ScoringFunctionInterpolation(LinearValue); + /// Boosts scores by a constant factor. + public static ScoringFunctionInterpolation Constant { get; } = new ScoringFunctionInterpolation(ConstantValue); + /// + /// Boosts scores by an amount that decreases quadratically. Boosts decrease slowly + /// for higher scores, and more quickly as the scores decrease. This interpolation + /// option is not allowed in tag scoring functions. + /// + public static ScoringFunctionInterpolation Quadratic { get; } = new ScoringFunctionInterpolation(QuadraticValue); + /// + /// Boosts scores by an amount that decreases logarithmically. Boosts decrease + /// quickly for higher scores, and more slowly as the scores decrease. This + /// interpolation option is not allowed in tag scoring functions. + /// + public static ScoringFunctionInterpolation Logarithmic { get; } = new ScoringFunctionInterpolation(LogarithmicValue); + /// Determines if two values are the same. + public static bool operator ==(ScoringFunctionInterpolation left, ScoringFunctionInterpolation right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(ScoringFunctionInterpolation left, ScoringFunctionInterpolation right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator ScoringFunctionInterpolation(string value) => new ScoringFunctionInterpolation(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ScoringFunctionInterpolation other && Equals(other); + /// + public bool Equals(ScoringFunctionInterpolation other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScoringProfile.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScoringProfile.Serialization.cs new file mode 100644 index 000000000000..21672c54dac8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScoringProfile.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ScoringProfile : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScoringProfile)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(TextWeights)) + { + writer.WritePropertyName("text"u8); + writer.WriteObjectValue(TextWeights, options); + } + if (Optional.IsCollectionDefined(Functions)) + { + writer.WritePropertyName("functions"u8); + writer.WriteStartArray(); + foreach (var item in Functions) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(FunctionAggregation)) + { + writer.WritePropertyName("functionAggregation"u8); + writer.WriteStringValue(FunctionAggregation.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + ScoringProfile IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScoringProfile)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeScoringProfile(document.RootElement, options); + } + + internal static ScoringProfile DeserializeScoringProfile(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + TextWeights text = default; + IList functions = default; + ScoringFunctionAggregation? 
functionAggregation = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("text"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + text = TextWeights.DeserializeTextWeights(property.Value, options); + continue; + } + if (property.NameEquals("functions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(ScoringFunction.DeserializeScoringFunction(item, options)); + } + functions = array; + continue; + } + if (property.NameEquals("functionAggregation"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + functionAggregation = new ScoringFunctionAggregation(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ScoringProfile(name, text, functions ?? new ChangeTrackingList(), functionAggregation, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ScoringProfile)} does not support writing '{options.Format}' format."); + } + } + + ScoringProfile IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScoringProfile(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ScoringProfile)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ScoringProfile FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScoringProfile(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScoringProfile.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScoringProfile.cs new file mode 100644 index 000000000000..b50dd750434b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScoringProfile.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
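// Illustrative usage sketch: ScoringFunctionAggregation and ScoringFunctionInterpolation
// (defined above) are extensible string-backed enums, so values unknown to this library
// version still round-trip, and comparisons are case-insensitive.
using Azure.Search.Documents;

ScoringFunctionAggregation aggregation = ScoringFunctionAggregation.Maximum;
ScoringFunctionInterpolation interpolation = "Linear";                    // implicit conversion from string
bool isLinear = interpolation == ScoringFunctionInterpolation.Linear;     // true: "Linear" equals "linear"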
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Defines parameters for a search index that influence scoring in search queries. + public partial class ScoringProfile + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the scoring profile. + /// is null. + public ScoringProfile(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + Functions = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The name of the scoring profile. + /// Parameters that boost scoring based on text matches in certain index fields. + /// + /// The collection of functions that influence the scoring of documents. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + /// + /// A value indicating how the results of individual scoring functions should be + /// combined. Defaults to "Sum". Ignored if there are no scoring functions. + /// + /// Keeps track of any properties unknown to the library. + internal ScoringProfile(string name, TextWeights textWeights, IList functions, ScoringFunctionAggregation? functionAggregation, IDictionary serializedAdditionalRawData) + { + Name = name; + TextWeights = textWeights; + Functions = functions; + FunctionAggregation = functionAggregation; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ScoringProfile() + { + } + + /// The name of the scoring profile. + public string Name { get; set; } + /// Parameters that boost scoring based on text matches in certain index fields. + public TextWeights TextWeights { get; set; } + /// + /// The collection of functions that influence the scoring of documents. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public IList Functions { get; } + /// + /// A value indicating how the results of individual scoring functions should be + /// combined. Defaults to "Sum". Ignored if there are no scoring functions. + /// + public ScoringFunctionAggregation? 
FunctionAggregation { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ScoringStatistics.cs b/sdk/search/Azure.Search.Documents/src/Generated/ScoringStatistics.cs new file mode 100644 index 000000000000..c9a678d47a59 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ScoringStatistics.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// A value that specifies whether we want to calculate scoring statistics (such as + /// document frequency) globally for more consistent scoring, or locally, for lower + /// latency. The default is 'local'. Use 'global' to aggregate scoring statistics + /// globally before scoring. Using global scoring statistics can increase latency + /// of search queries. + /// + public readonly partial struct ScoringStatistics : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public ScoringStatistics(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string LocalValue = "local"; + private const string GlobalValue = "global"; + + /// The scoring statistics will be calculated locally for lower latency. + public static ScoringStatistics Local { get; } = new ScoringStatistics(LocalValue); + /// The scoring statistics will be calculated globally for more consistent scoring. + public static ScoringStatistics Global { get; } = new ScoringStatistics(GlobalValue); + /// Determines if two values are the same. + public static bool operator ==(ScoringStatistics left, ScoringStatistics right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(ScoringStatistics left, ScoringStatistics right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator ScoringStatistics(string value) => new ScoringStatistics(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ScoringStatistics other && Equals(other); + /// + public bool Equals(ScoringStatistics other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchAlias.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchAlias.Serialization.cs new file mode 100644 index 000000000000..258a375f7479 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchAlias.Serialization.cs @@ -0,0 +1,171 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
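// Illustrative usage sketch: composing the scoring profile shell defined above. The concrete
// ScoringFunction subclasses (distance, freshness, magnitude, tag) are defined elsewhere in
// this change and their constructors are not shown here, so adding one is only indicated as
// a comment with a hypothetical signature.
using Azure.Search.Documents;

var profile = new ScoringProfile("boost-by-rating")
{
    FunctionAggregation = ScoringFunctionAggregation.Sum,
};
// profile.Functions.Add(new MagnitudeScoringFunction(/* field, boost, range */));  // hypothetical constructor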
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchAlias : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchAlias)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("indexes"u8); + writer.WriteStartArray(); + foreach (var item in Indexes) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(ETag)) + { + writer.WritePropertyName("@odata.etag"u8); + writer.WriteStringValue(ETag); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchAlias IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchAlias)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchAlias(document.RootElement, options); + } + + internal static SearchAlias DeserializeSearchAlias(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + IList indexes = default; + string odataEtag = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("indexes"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + indexes = array; + continue; + } + if (property.NameEquals("@odata.etag"u8)) + { + odataEtag = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchAlias(name, indexes, odataEtag, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchAlias)} does not support writing '{options.Format}' format."); + } + } + + SearchAlias IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchAlias(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchAlias)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchAlias FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchAlias(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchAlias.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchAlias.cs new file mode 100644 index 000000000000..59b492b12a4c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchAlias.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Represents an index alias, which describes a mapping from the alias name to an + /// index. The alias name can be used in place of the index name for supported + /// operations. + /// + public partial class SearchAlias + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the alias. + /// The name of the index this alias maps to. Only one index name may be specified. + /// or is null. + public SearchAlias(string name, IEnumerable indexes) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(indexes, nameof(indexes)); + + Name = name; + Indexes = indexes.ToList(); + } + + /// Initializes a new instance of . + /// The name of the alias. + /// The name of the index this alias maps to. Only one index name may be specified. + /// The ETag of the alias. + /// Keeps track of any properties unknown to the library. + internal SearchAlias(string name, IList indexes, string eTag, IDictionary serializedAdditionalRawData) + { + Name = name; + Indexes = indexes; + ETag = eTag; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchAlias() + { + } + + /// The name of the alias. + public string Name { get; set; } + /// The name of the index this alias maps to. Only one index name may be specified. + public IList Indexes { get; } + /// The ETag of the alias. + public string ETag { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchClient.cs new file mode 100644 index 000000000000..f68947bdb1dc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchClient.cs @@ -0,0 +1,335 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Autorest.CSharp.Core; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.Search.Documents +{ + // Data plane generated client. + /// + /// Client that can be used to manage and query indexes and documents, as well as + /// manage other resources, on a search service. + /// + public partial class SearchClient + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// Initializes a new instance of SearchClient. + /// Service host. 
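// Illustrative usage sketch: creating the index alias modeled above. The constructor takes
// the alias name and the index names it maps to (only one index name may be specified);
// ETag supports optimistic concurrency. Names below are placeholders.
using Azure.Search.Documents;

var alias = new SearchAlias("hotels-alias", new[] { "hotels-v2" });

// Wire payload produced by the SearchAlias serializer shown earlier (values are placeholders):
// { "name": "hotels-alias", "indexes": [ "hotels-v2" ] }   // plus "@odata.etag" when ETag is set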
+ /// A credential used to authenticate to an Azure Service. + /// or is null. + public SearchClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new SearchClientOptions()) + { + } + + /// Initializes a new instance of SearchClient. + /// Service host. + /// A credential used to authenticate to an Azure Service. + /// or is null. + public SearchClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new SearchClientOptions()) + { + } + + /// Initializes a new instance of SearchClient. + /// Service host. + /// A credential used to authenticate to an Azure Service. + /// The options for configuring the client. + /// or is null. + public SearchClient(Uri endpoint, AzureKeyCredential credential, SearchClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new SearchClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _keyCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// Initializes a new instance of SearchClient. + /// Service host. + /// A credential used to authenticate to an Azure Service. + /// The options for configuring the client. + /// or is null. + public SearchClient(Uri endpoint, TokenCredential credential, SearchClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new SearchClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _tokenCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// Gets service level statistics for a search service. + /// The cancellation token to use. + /// + public virtual async Task> GetServiceStatisticsAsync(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetServiceStatisticsAsync(context).ConfigureAwait(false); + return Response.FromValue(SearchServiceStatistics.FromResponse(response), response); + } + + /// Gets service level statistics for a search service. + /// The cancellation token to use. + /// + public virtual Response GetServiceStatistics(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetServiceStatistics(context); + return Response.FromValue(SearchServiceStatistics.FromResponse(response), response); + } + + /// + /// [Protocol Method] Gets service level statistics for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task GetServiceStatisticsAsync(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("SearchClient.GetServiceStatistics"); + scope.Start(); + try + { + using HttpMessage message = CreateGetServiceStatisticsRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Gets service level statistics for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetServiceStatistics(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("SearchClient.GetServiceStatistics"); + scope.Start(); + try + { + using HttpMessage message = CreateGetServiceStatisticsRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves a summary of statistics for all indexes in the search service. + /// The cancellation token to use. + /// + public virtual AsyncPageable GetIndexStatsSummaryAsync(CancellationToken cancellationToken = default) + { + RequestContext context = cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetIndexStatsSummaryRequest(context); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, null, e => IndexStatisticsSummary.DeserializeIndexStatisticsSummary(e), ClientDiagnostics, _pipeline, "SearchClient.GetIndexStatsSummary", "value", null, context); + } + + /// Retrieves a summary of statistics for all indexes in the search service. + /// The cancellation token to use. + /// + public virtual Pageable GetIndexStatsSummary(CancellationToken cancellationToken = default) + { + RequestContext context = cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetIndexStatsSummaryRequest(context); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, null, e => IndexStatisticsSummary.DeserializeIndexStatisticsSummary(e), ClientDiagnostics, _pipeline, "SearchClient.GetIndexStatsSummary", "value", null, context); + } + + /// + /// [Protocol Method] Retrieves a summary of statistics for all indexes in the search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. 
+ /// + public virtual AsyncPageable GetIndexStatsSummaryAsync(RequestContext context) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetIndexStatsSummaryRequest(context); + return GeneratorPageableHelpers.CreateAsyncPageable(FirstPageRequest, null, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "SearchClient.GetIndexStatsSummary", "value", null, context); + } + + /// + /// [Protocol Method] Retrieves a summary of statistics for all indexes in the search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The from the service containing a list of objects. Details of the body schema for each item in the collection are in the Remarks section below. + /// + public virtual Pageable GetIndexStatsSummary(RequestContext context) + { + HttpMessage FirstPageRequest(int? pageSizeHint) => CreateGetIndexStatsSummaryRequest(context); + return GeneratorPageableHelpers.CreatePageable(FirstPageRequest, null, e => BinaryData.FromString(e.GetRawText()), ClientDiagnostics, _pipeline, "SearchClient.GetIndexStatsSummary", "value", null, context); + } + + private DataSources _cachedDataSources; + private Indexers _cachedIndexers; + private Skillsets _cachedSkillsets; + private SynonymMaps _cachedSynonymMaps; + private Search.Documents.Indexes _cachedIndexes; + private Aliases _cachedAliases; + private Documents _cachedDocuments; + + /// Initializes a new instance of DataSources. + public virtual DataSources GetDataSourcesClient() + { + return Volatile.Read(ref _cachedDataSources) ?? Interlocked.CompareExchange(ref _cachedDataSources, new DataSources(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, _apiVersion), null) ?? _cachedDataSources; + } + + /// Initializes a new instance of Indexers. + public virtual Indexers GetIndexersClient() + { + return Volatile.Read(ref _cachedIndexers) ?? Interlocked.CompareExchange(ref _cachedIndexers, new Indexers(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, _apiVersion), null) ?? _cachedIndexers; + } + + /// Initializes a new instance of Skillsets. + public virtual Skillsets GetSkillsetsClient() + { + return Volatile.Read(ref _cachedSkillsets) ?? Interlocked.CompareExchange(ref _cachedSkillsets, new Skillsets(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, _apiVersion), null) ?? _cachedSkillsets; + } + + /// Initializes a new instance of SynonymMaps. + public virtual SynonymMaps GetSynonymMapsClient() + { + return Volatile.Read(ref _cachedSynonymMaps) ?? Interlocked.CompareExchange(ref _cachedSynonymMaps, new SynonymMaps(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, _apiVersion), null) ?? _cachedSynonymMaps; + } + + /// Initializes a new instance of Indexes. + public virtual Search.Documents.Indexes GetIndexesClient() + { + return Volatile.Read(ref _cachedIndexes) ?? Interlocked.CompareExchange(ref _cachedIndexes, new Search.Documents.Indexes(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, _apiVersion), null) ?? _cachedIndexes; + } + + /// Initializes a new instance of Aliases. 
+ public virtual Aliases GetAliasesClient() + { + return Volatile.Read(ref _cachedAliases) ?? Interlocked.CompareExchange(ref _cachedAliases, new Aliases(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, _apiVersion), null) ?? _cachedAliases; + } + + /// Initializes a new instance of Documents. + public virtual Documents GetDocumentsClient() + { + return Volatile.Read(ref _cachedDocuments) ?? Interlocked.CompareExchange(ref _cachedDocuments, new Documents(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, _apiVersion), null) ?? _cachedDocuments; + } + + internal HttpMessage CreateGetServiceStatisticsRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/servicestats", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetIndexStatsSummaryRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/indexstats", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchClientOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchClientOptions.cs new file mode 100644 index 000000000000..4f4c9ada56b5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchClientOptions.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using Azure.Core; + +namespace Azure.Search.Documents +{ + /// Client options for SearchClient. + public partial class SearchClientOptions : ClientOptions + { + /// The version of the service to use. + internal enum ServiceVersion + { + /// Service version "2024-07-01". + V2024_07_01 = 1, + /// Service version "2025-03-01-preview". + V2025_03_01_Preview = 2, + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsClientBuilderExtensions.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsClientBuilderExtensions.cs new file mode 100644 index 000000000000..941db21e8ee6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsClientBuilderExtensions.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
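// --- Illustrative usage sketch; not part of the generated diff. ---
// A minimal example of the SearchClient surface added above: construct the client
// with an AzureKeyCredential, call the GetServiceStatisticsAsync convenience
// overload, enumerate the pageable index statistics summary, and grab two of the
// cached operation-group subclients. The endpoint and key are placeholders, and
// `var` is used because the diff rendering dropped the generic type arguments
// from the declared return types.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;

public static class SearchClientUsageSketch
{
    public static async Task RunAsync()
    {
        var endpoint = new Uri("https://<service-name>.search.windows.net");
        var client = new SearchClient(endpoint, new AzureKeyCredential("<api-key>"));

        // Convenience overload returning strongly typed service statistics.
        var stats = await client.GetServiceStatisticsAsync();
        Console.WriteLine(stats.GetRawResponse().Status);

        // Pageable convenience overload: one statistics summary per index.
        await foreach (var summary in client.GetIndexStatsSummaryAsync())
        {
            Console.WriteLine(summary);
        }

        // Operation-group subclients are created lazily and cached by the client.
        var indexes = client.GetIndexesClient();
        var documents = client.GetDocumentsClient();
    }
}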
+ +// + +#nullable disable + +using System; +using Azure; +using Azure.Core.Extensions; +using Azure.Search.Documents; + +namespace Microsoft.Extensions.Azure +{ + /// Extension methods to add to client builder. + public static partial class SearchDocumentsClientBuilderExtensions + { + /// Registers a instance. + /// The builder to register with. + /// Service host. + /// A credential used to authenticate to an Azure Service. + public static IAzureClientBuilder AddSearchClient(this TBuilder builder, Uri endpoint, AzureKeyCredential credential) + where TBuilder : IAzureClientFactoryBuilder + { + return builder.RegisterClientFactory((options) => new SearchClient(endpoint, credential, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// Service host. + public static IAzureClientBuilder AddSearchClient(this TBuilder builder, Uri endpoint) + where TBuilder : IAzureClientFactoryBuilderWithCredential + { + return builder.RegisterClientFactory((options, cred) => new SearchClient(endpoint, cred, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// The configuration values. + public static IAzureClientBuilder AddSearchClient(this TBuilder builder, TConfiguration configuration) + where TBuilder : IAzureClientFactoryBuilderWithConfiguration + { + return builder.RegisterClientFactory(configuration); + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsResult.Serialization.cs new file mode 100644 index 000000000000..8dc219b4e883 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsResult.Serialization.cs @@ -0,0 +1,353 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + internal partial class SearchDocumentsResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchDocumentsResult)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Count)) + { + writer.WritePropertyName("@odata.count"u8); + writer.WriteNumberValue(Count.Value); + } + if (Optional.IsDefined(Coverage)) + { + writer.WritePropertyName("@search.coverage"u8); + writer.WriteNumberValue(Coverage.Value); + } + if (Optional.IsCollectionDefined(Facets)) + { + writer.WritePropertyName("@search.facets"u8); + writer.WriteStartObject(); + foreach (var item in Facets) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStartArray(); + foreach (var item0 in item.Value) + { + writer.WriteObjectValue(item0, options); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + if (Optional.IsCollectionDefined(Answers)) + { + writer.WritePropertyName("@search.answers"u8); + writer.WriteStartArray(); + foreach (var item in Answers) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && Optional.IsDefined(DebugInfo)) + { + writer.WritePropertyName("@search.debug"u8); + writer.WriteObjectValue(DebugInfo, options); + } + if (Optional.IsDefined(NextPageParameters)) + { + writer.WritePropertyName("@search.nextPageParameters"u8); + writer.WriteObjectValue(NextPageParameters, options); + } + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Results) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(NextLink)) + { + writer.WritePropertyName("@odata.nextLink"u8); + writer.WriteStringValue(NextLink); + } + if (Optional.IsDefined(SemanticPartialResponseReason)) + { + writer.WritePropertyName("@search.semanticPartialResponseReason"u8); + writer.WriteStringValue(SemanticPartialResponseReason.Value.ToString()); + } + if (Optional.IsDefined(SemanticPartialResponseType)) + { + writer.WritePropertyName("@search.semanticPartialResponseType"u8); + writer.WriteStringValue(SemanticPartialResponseType.Value.ToString()); + } + if (options.Format != "W" && Optional.IsDefined(SemanticQueryRewritesResultType)) + { + writer.WritePropertyName("@search.semanticQueryRewritesResultType"u8); + writer.WriteStringValue(SemanticQueryRewritesResultType.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchDocumentsResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchDocumentsResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchDocumentsResult(document.RootElement, options); + } + + internal static SearchDocumentsResult DeserializeSearchDocumentsResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + long? odataCount = default; + double? searchCoverage = default; + IReadOnlyDictionary> searchFacets = default; + IReadOnlyList searchAnswers = default; + DebugInfo searchDebug = default; + SearchOptions searchNextPageParameters = default; + IReadOnlyList value = default; + string odataNextLink = default; + SemanticErrorReason? searchSemanticPartialResponseReason = default; + SemanticSearchResultsType? searchSemanticPartialResponseType = default; + SemanticQueryRewritesResultType? searchSemanticQueryRewritesResultType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.count"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + odataCount = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("@search.coverage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchCoverage = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("@search.facets"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary> dictionary = new Dictionary>(); + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property0.Name, null); + } + else + { + List array = new List(); + foreach (var item in property0.Value.EnumerateArray()) + { + array.Add(Search.Documents.FacetResult.DeserializeFacetResult(item, options)); + } + dictionary.Add(property0.Name, array); + } + } + searchFacets = dictionary; + continue; + } + if (property.NameEquals("@search.answers"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(QueryAnswerResult.DeserializeQueryAnswerResult(item, options)); + } + searchAnswers = array; + continue; + } + if (property.NameEquals("@search.debug"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchDebug = DebugInfo.DeserializeDebugInfo(property.Value, options); + continue; + } + if (property.NameEquals("@search.nextPageParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchNextPageParameters = SearchOptions.DeserializeSearchOptions(property.Value, options); + continue; + } + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchResult.DeserializeSearchResult(item, options)); + } + value = array; + continue; + } + if (property.NameEquals("@odata.nextLink"u8)) + { + odataNextLink = property.Value.GetString(); + continue; + } + if (property.NameEquals("@search.semanticPartialResponseReason"u8)) + { + if 
(property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchSemanticPartialResponseReason = new SemanticErrorReason(property.Value.GetString()); + continue; + } + if (property.NameEquals("@search.semanticPartialResponseType"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchSemanticPartialResponseType = new SemanticSearchResultsType(property.Value.GetString()); + continue; + } + if (property.NameEquals("@search.semanticQueryRewritesResultType"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchSemanticQueryRewritesResultType = new SemanticQueryRewritesResultType(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchDocumentsResult( + odataCount, + searchCoverage, + searchFacets ?? new ChangeTrackingDictionary>(), + searchAnswers ?? new ChangeTrackingList(), + searchDebug, + searchNextPageParameters, + value, + odataNextLink, + searchSemanticPartialResponseReason, + searchSemanticPartialResponseType, + searchSemanticQueryRewritesResultType, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchDocumentsResult)} does not support writing '{options.Format}' format."); + } + } + + SearchDocumentsResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchDocumentsResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchDocumentsResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchDocumentsResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchDocumentsResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsResult.cs new file mode 100644 index 000000000000..c5b083d268d1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchDocumentsResult.cs @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
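// --- Illustrative sketch; not part of the generated diff. ---
// The SearchDocumentsResult serializer above reads OData-style property names from
// the search response body. The payload below uses made-up values and a made-up
// document shape, and is only parsed with System.Text.Json to show that wire
// shape; the real deserialization path is the internal
// DeserializeSearchDocumentsResult method shown in the diff.
using System;
using System.Text.Json;

public static class SearchDocumentsResultWireShape
{
    public static void Inspect()
    {
        const string payload = @"{
            ""@odata.count"": 42,
            ""@search.coverage"": 100.0,
            ""@search.facets"": { ""category"": [ { ""value"": ""hotel"", ""count"": 10 } ] },
            ""value"": [ { ""@search.score"": 1.0, ""hotelName"": ""Fancy Stay"" } ],
            ""@odata.nextLink"": ""https://<service-name>.search.windows.net/indexes('hotels')/docs""
        }";

        // Property names match those handled by JsonModelWriteCore and
        // DeserializeSearchDocumentsResult above.
        using JsonDocument document = JsonDocument.Parse(payload);
        Console.WriteLine(document.RootElement.GetProperty("@odata.count").GetInt64());
    }
}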
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents.Models +{ + /// Response containing search results from an index. + internal partial class SearchDocumentsResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The sequence of results returned by the query. + /// is null. + internal SearchDocumentsResult(IEnumerable results) + { + Argument.AssertNotNull(results, nameof(results)); + + Facets = new ChangeTrackingDictionary>(); + Answers = new ChangeTrackingList(); + Results = results.ToList(); + } + + /// Initializes a new instance of . + /// + /// The total count of results found by the search operation, or null if the count + /// was not requested. If present, the count may be greater than the number of + /// results in this response. This can happen if you use the $top or $skip + /// parameters, or if the query can't return all the requested documents in a + /// single response. + /// + /// + /// A value indicating the percentage of the index that was included in the query, + /// or null if minimumCoverage was not specified in the request. + /// + /// + /// The facet query results for the search operation, organized as a collection of + /// buckets for each faceted field; null if the query did not include any facet + /// expressions. + /// + /// + /// The answers query results for the search operation; null if the answers query + /// parameter was not specified or set to 'none'. + /// + /// Debug information that applies to the search results as a whole. + /// + /// Continuation JSON payload returned when the query can't return all the + /// requested results in a single response. You can use this JSON along with + /// + /// The sequence of results returned by the query. + /// + /// Continuation URL returned when the query can't return all the requested results + /// in a single response. You can use this URL to formulate another GET or POST + /// Search request to get the next part of the search response. Make sure to use + /// the same verb (GET or POST) as the request that produced this response. + /// + /// Reason that a partial response was returned for a semantic ranking request. + /// Type of partial response that was returned for a semantic ranking request. + /// Type of query rewrite that was used to retrieve documents. + /// Keeps track of any properties unknown to the library. + internal SearchDocumentsResult(long? count, double? coverage, IReadOnlyDictionary> facets, IReadOnlyList answers, DebugInfo debugInfo, SearchOptions nextPageParameters, IReadOnlyList results, string nextLink, SemanticErrorReason? semanticPartialResponseReason, SemanticSearchResultsType? semanticPartialResponseType, SemanticQueryRewritesResultType? 
semanticQueryRewritesResultType, IDictionary serializedAdditionalRawData) + { + Count = count; + Coverage = coverage; + Facets = facets; + Answers = answers; + DebugInfo = debugInfo; + NextPageParameters = nextPageParameters; + Results = results; + NextLink = nextLink; + SemanticPartialResponseReason = semanticPartialResponseReason; + SemanticPartialResponseType = semanticPartialResponseType; + SemanticQueryRewritesResultType = semanticQueryRewritesResultType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchDocumentsResult() + { + } + + /// + /// The total count of results found by the search operation, or null if the count + /// was not requested. If present, the count may be greater than the number of + /// results in this response. This can happen if you use the $top or $skip + /// parameters, or if the query can't return all the requested documents in a + /// single response. + /// + public long? Count { get; } + /// + /// A value indicating the percentage of the index that was included in the query, + /// or null if minimumCoverage was not specified in the request. + /// + public double? Coverage { get; } + /// + /// The facet query results for the search operation, organized as a collection of + /// buckets for each faceted field; null if the query did not include any facet + /// expressions. + /// + public IReadOnlyDictionary> Facets { get; } + /// + /// The answers query results for the search operation; null if the answers query + /// parameter was not specified or set to 'none'. + /// + public IReadOnlyList Answers { get; } + /// Debug information that applies to the search results as a whole. + public DebugInfo DebugInfo { get; } + /// + /// Continuation JSON payload returned when the query can't return all the + /// requested results in a single response. You can use this JSON along with + /// + public SearchOptions NextPageParameters { get; } + /// The sequence of results returned by the query. + public IReadOnlyList Results { get; } + /// + /// Continuation URL returned when the query can't return all the requested results + /// in a single response. You can use this URL to formulate another GET or POST + /// Search request to get the next part of the search response. Make sure to use + /// the same verb (GET or POST) as the request that produced this response. + /// + public string NextLink { get; } + /// Reason that a partial response was returned for a semantic ranking request. + public SemanticErrorReason? SemanticPartialResponseReason { get; } + /// Type of partial response that was returned for a semantic ranking request. + public SemanticSearchResultsType? SemanticPartialResponseType { get; } + /// Type of query rewrite that was used to retrieve documents. + public SemanticQueryRewritesResultType? SemanticQueryRewritesResultType { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchField.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchField.Serialization.cs new file mode 100644 index 000000000000..2d26893dd24a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchField.Serialization.cs @@ -0,0 +1,425 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
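// --- Illustrative sketch; not part of the generated diff. ---
// The SearchDocumentsClientBuilderExtensions added earlier in this diff plug the
// generated SearchClient into Microsoft.Extensions.Azure client registration. A
// typical ASP.NET Core wiring might look like the following, assuming the standard
// AddAzureClients entry point from the Microsoft.Extensions.Azure package; the
// service name and key are placeholders.
using System;
using Azure;
using Microsoft.Extensions.Azure;
using Microsoft.Extensions.DependencyInjection;

public static class SearchClientRegistrationSketch
{
    public static void Configure(IServiceCollection services)
    {
        services.AddAzureClients(clients =>
        {
            // Uses the AddSearchClient(Uri, AzureKeyCredential) overload from the diff.
            clients.AddSearchClient(
                new Uri("https://<service-name>.search.windows.net"),
                new AzureKeyCredential("<api-key>"));
        });
    }
}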
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchField : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchField)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(Type.ToString()); + if (Optional.IsDefined(Key)) + { + writer.WritePropertyName("key"u8); + writer.WriteBooleanValue(Key.Value); + } + if (Optional.IsDefined(Retrievable)) + { + writer.WritePropertyName("retrievable"u8); + writer.WriteBooleanValue(Retrievable.Value); + } + if (Optional.IsDefined(Stored)) + { + writer.WritePropertyName("stored"u8); + writer.WriteBooleanValue(Stored.Value); + } + if (Optional.IsDefined(Searchable)) + { + writer.WritePropertyName("searchable"u8); + writer.WriteBooleanValue(Searchable.Value); + } + if (Optional.IsDefined(Filterable)) + { + writer.WritePropertyName("filterable"u8); + writer.WriteBooleanValue(Filterable.Value); + } + if (Optional.IsDefined(Sortable)) + { + writer.WritePropertyName("sortable"u8); + writer.WriteBooleanValue(Sortable.Value); + } + if (Optional.IsDefined(Facetable)) + { + writer.WritePropertyName("facetable"u8); + writer.WriteBooleanValue(Facetable.Value); + } + if (Optional.IsDefined(Analyzer)) + { + writer.WritePropertyName("analyzer"u8); + writer.WriteStringValue(Analyzer.Value.ToString()); + } + if (Optional.IsDefined(SearchAnalyzer)) + { + writer.WritePropertyName("searchAnalyzer"u8); + writer.WriteStringValue(SearchAnalyzer.Value.ToString()); + } + if (Optional.IsDefined(IndexAnalyzer)) + { + writer.WritePropertyName("indexAnalyzer"u8); + writer.WriteStringValue(IndexAnalyzer.Value.ToString()); + } + if (Optional.IsDefined(Normalizer)) + { + writer.WritePropertyName("normalizer"u8); + writer.WriteStringValue(Normalizer.Value.ToString()); + } + if (Optional.IsDefined(VectorSearchDimensions)) + { + writer.WritePropertyName("dimensions"u8); + writer.WriteNumberValue(VectorSearchDimensions.Value); + } + if (Optional.IsDefined(VectorSearchProfileName)) + { + writer.WritePropertyName("vectorSearchProfile"u8); + writer.WriteStringValue(VectorSearchProfileName); + } + if (Optional.IsDefined(VectorEncodingFormat)) + { + writer.WritePropertyName("vectorEncoding"u8); + writer.WriteStringValue(VectorEncodingFormat.Value.ToString()); + } + if (Optional.IsCollectionDefined(SynonymMaps)) + { + writer.WritePropertyName("synonymMaps"u8); + writer.WriteStartArray(); + foreach (var item in SynonymMaps) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Fields)) + { + writer.WritePropertyName("fields"u8); + writer.WriteStartArray(); + foreach 
(var item in Fields) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchField)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchField(document.RootElement, options); + } + + internal static SearchField DeserializeSearchField(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + SearchFieldDataType type = default; + bool? key = default; + bool? retrievable = default; + bool? stored = default; + bool? searchable = default; + bool? filterable = default; + bool? sortable = default; + bool? facetable = default; + LexicalAnalyzerName? analyzer = default; + LexicalAnalyzerName? searchAnalyzer = default; + LexicalAnalyzerName? indexAnalyzer = default; + LexicalNormalizerName? normalizer = default; + int? dimensions = default; + string vectorSearchProfile = default; + VectorEncodingFormat? 
vectorEncoding = default; + IList synonymMaps = default; + IList fields = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("type"u8)) + { + type = new SearchFieldDataType(property.Value.GetString()); + continue; + } + if (property.NameEquals("key"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + key = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("retrievable"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + retrievable = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("stored"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + stored = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("searchable"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchable = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("filterable"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + filterable = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("sortable"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sortable = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("facetable"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + facetable = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("analyzer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + analyzer = new LexicalAnalyzerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("searchAnalyzer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchAnalyzer = new LexicalAnalyzerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("indexAnalyzer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + indexAnalyzer = new LexicalAnalyzerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("normalizer"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + normalizer = new LexicalNormalizerName(property.Value.GetString()); + continue; + } + if (property.NameEquals("dimensions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + dimensions = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("vectorSearchProfile"u8)) + { + vectorSearchProfile = property.Value.GetString(); + continue; + } + if (property.NameEquals("vectorEncoding"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + vectorEncoding = new VectorEncodingFormat(property.Value.GetString()); + continue; + } + if (property.NameEquals("synonymMaps"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + synonymMaps = array; + continue; + } + if (property.NameEquals("fields"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + 
array.Add(DeserializeSearchField(item, options)); + } + fields = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchField( + name, + type, + key, + retrievable, + stored, + searchable, + filterable, + sortable, + facetable, + analyzer, + searchAnalyzer, + indexAnalyzer, + normalizer, + dimensions, + vectorSearchProfile, + vectorEncoding, + synonymMaps ?? new ChangeTrackingList(), + fields ?? new ChangeTrackingList(), + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchField)} does not support writing '{options.Format}' format."); + } + } + + SearchField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchField)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchField FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchField(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchField.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchField.cs new file mode 100644 index 000000000000..bbfb8a27cbe4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchField.cs @@ -0,0 +1,365 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Represents a field in an index definition, which describes the name, data type, + /// and search behavior of a field. + /// + public partial class SearchField + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the field, which must be unique within the fields collection of the + /// index or parent field. + /// + /// The data type of the field. + /// is null. + public SearchField(string name, SearchFieldDataType type) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + Type = type; + SynonymMaps = new ChangeTrackingList(); + Fields = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// The name of the field, which must be unique within the fields collection of the + /// index or parent field. + /// + /// The data type of the field. + /// + /// A value indicating whether the field uniquely identifies documents in the + /// index. Exactly one top-level field in each index must be chosen as the key + /// field and it must be of type Edm.String. Key fields can be used to look up + /// documents directly and update or delete specific documents. Default is false + /// for simple fields and null for complex fields. + /// + /// + /// A value indicating whether the field can be returned in a search result. You + /// can disable this option if you want to use a field (for example, margin) as a + /// filter, sorting, or scoring mechanism but do not want the field to be visible + /// to the end user. This property must be true for key fields, and it must be null + /// for complex fields. This property can be changed on existing fields. Enabling + /// this property does not cause any increase in index storage requirements. + /// Default is true for simple fields, false for vector fields, and null for + /// complex fields. + /// + /// + /// An immutable value indicating whether the field will be persisted separately on + /// disk to be returned in a search result. You can disable this option if you + /// don't plan to return the field contents in a search response to save on storage + /// overhead. This can only be set during index creation and only for vector + /// fields. This property cannot be changed for existing fields or set as false for + /// new fields. If this property is set as false, the property 'retrievable' must + /// also be set to false. This property must be true or unset for key fields, for + /// new fields, and for non-vector fields, and it must be null for complex fields. + /// Disabling this property will reduce index storage requirements. The default is + /// true for vector fields. + /// + /// + /// A value indicating whether the field is full-text searchable. This means it + /// will undergo analysis such as word-breaking during indexing. If you set a + /// searchable field to a value like "sunny day", internally it will be split into + /// the individual tokens "sunny" and "day". This enables full-text searches for + /// these terms. Fields of type Edm.String or Collection(Edm.String) are searchable + /// by default. This property must be false for simple fields of other non-string + /// data types, and it must be null for complex fields. Note: searchable fields + /// consume extra space in your index to accommodate additional tokenized versions + /// of the field value for full-text searches. If you want to save space in your + /// index and you don't need a field to be included in searches, set searchable to + /// false. 
+ /// + /// + /// A value indicating whether to enable the field to be referenced in $filter + /// queries. filterable differs from searchable in how strings are handled. Fields + /// of type Edm.String or Collection(Edm.String) that are filterable do not undergo + /// word-breaking, so comparisons are for exact matches only. For example, if you + /// set such a field f to "sunny day", $filter=f eq 'sunny' will find no matches, + /// but $filter=f eq 'sunny day' will. This property must be null for complex + /// fields. Default is true for simple fields and null for complex fields. + /// + /// + /// A value indicating whether to enable the field to be referenced in $orderby + /// expressions. By default, the search engine sorts results by score, but in many + /// experiences users will want to sort by fields in the documents. A simple field + /// can be sortable only if it is single-valued (it has a single value in the scope + /// of the parent document). Simple collection fields cannot be sortable, since + /// they are multi-valued. Simple sub-fields of complex collections are also + /// multi-valued, and therefore cannot be sortable. This is true whether it's an + /// immediate parent field, or an ancestor field, that's the complex collection. + /// Complex fields cannot be sortable and the sortable property must be null for + /// such fields. The default for sortable is true for single-valued simple fields, + /// false for multi-valued simple fields, and null for complex fields. + /// + /// + /// A value indicating whether to enable the field to be referenced in facet + /// queries. Typically used in a presentation of search results that includes hit + /// count by category (for example, search for digital cameras and see hits by + /// brand, by megapixels, by price, and so on). This property must be null for + /// complex fields. Fields of type Edm.GeographyPoint or + /// Collection(Edm.GeographyPoint) cannot be facetable. Default is true for all + /// other simple fields. + /// + /// + /// The name of the analyzer to use for the field. This option can be used only + /// with searchable fields and it can't be set together with either searchAnalyzer + /// or indexAnalyzer. Once the analyzer is chosen, it cannot be changed for the + /// field. Must be null for complex fields. + /// + /// + /// The name of the analyzer used at search time for the field. This option can be + /// used only with searchable fields. It must be set together with indexAnalyzer + /// and it cannot be set together with the analyzer option. This property cannot be + /// set to the name of a language analyzer; use the analyzer property instead if + /// you need a language analyzer. This analyzer can be updated on an existing + /// field. Must be null for complex fields. + /// + /// + /// The name of the analyzer used at indexing time for the field. This option can + /// be used only with searchable fields. It must be set together with + /// searchAnalyzer and it cannot be set together with the analyzer option. This + /// property cannot be set to the name of a language analyzer; use the analyzer + /// property instead if you need a language analyzer. Once the analyzer is chosen, + /// it cannot be changed for the field. Must be null for complex fields. + /// + /// + /// The name of the normalizer to use for the field. This option can be used only + /// with fields with filterable, sortable, or facetable enabled. Once the + /// normalizer is chosen, it cannot be changed for the field. 
Must be null for + /// complex fields. + /// + /// The dimensionality of the vector field. + /// + /// The name of the vector search profile that specifies the algorithm and + /// vectorizer to use when searching the vector field. + /// + /// The encoding format to interpret the field contents. + /// + /// A list of the names of synonym maps to associate with this field. This option + /// can be used only with searchable fields. Currently only one synonym map per + /// field is supported. Assigning a synonym map to a field ensures that query terms + /// targeting that field are expanded at query-time using the rules in the synonym + /// map. This attribute can be changed on existing fields. Must be null or an empty + /// collection for complex fields. + /// + /// + /// A list of sub-fields if this is a field of type Edm.ComplexType or + /// Collection(Edm.ComplexType). Must be null or empty for simple fields. + /// + /// Keeps track of any properties unknown to the library. + internal SearchField(string name, SearchFieldDataType type, bool? key, bool? retrievable, bool? stored, bool? searchable, bool? filterable, bool? sortable, bool? facetable, LexicalAnalyzerName? analyzer, LexicalAnalyzerName? searchAnalyzer, LexicalAnalyzerName? indexAnalyzer, LexicalNormalizerName? normalizer, int? vectorSearchDimensions, string vectorSearchProfileName, VectorEncodingFormat? vectorEncodingFormat, IList synonymMaps, IList fields, IDictionary serializedAdditionalRawData) + { + Name = name; + Type = type; + Key = key; + Retrievable = retrievable; + Stored = stored; + Searchable = searchable; + Filterable = filterable; + Sortable = sortable; + Facetable = facetable; + Analyzer = analyzer; + SearchAnalyzer = searchAnalyzer; + IndexAnalyzer = indexAnalyzer; + Normalizer = normalizer; + VectorSearchDimensions = vectorSearchDimensions; + VectorSearchProfileName = vectorSearchProfileName; + VectorEncodingFormat = vectorEncodingFormat; + SynonymMaps = synonymMaps; + Fields = fields; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchField() + { + } + + /// + /// The name of the field, which must be unique within the fields collection of the + /// index or parent field. + /// + public string Name { get; set; } + /// The data type of the field. + public SearchFieldDataType Type { get; set; } + /// + /// A value indicating whether the field uniquely identifies documents in the + /// index. Exactly one top-level field in each index must be chosen as the key + /// field and it must be of type Edm.String. Key fields can be used to look up + /// documents directly and update or delete specific documents. Default is false + /// for simple fields and null for complex fields. + /// + public bool? Key { get; set; } + /// + /// A value indicating whether the field can be returned in a search result. You + /// can disable this option if you want to use a field (for example, margin) as a + /// filter, sorting, or scoring mechanism but do not want the field to be visible + /// to the end user. This property must be true for key fields, and it must be null + /// for complex fields. This property can be changed on existing fields. Enabling + /// this property does not cause any increase in index storage requirements. + /// Default is true for simple fields, false for vector fields, and null for + /// complex fields. + /// + public bool? 
Retrievable { get; set; } + /// + /// An immutable value indicating whether the field will be persisted separately on + /// disk to be returned in a search result. You can disable this option if you + /// don't plan to return the field contents in a search response to save on storage + /// overhead. This can only be set during index creation and only for vector + /// fields. This property cannot be changed for existing fields or set as false for + /// new fields. If this property is set as false, the property 'retrievable' must + /// also be set to false. This property must be true or unset for key fields, for + /// new fields, and for non-vector fields, and it must be null for complex fields. + /// Disabling this property will reduce index storage requirements. The default is + /// true for vector fields. + /// + public bool? Stored { get; set; } + /// + /// A value indicating whether the field is full-text searchable. This means it + /// will undergo analysis such as word-breaking during indexing. If you set a + /// searchable field to a value like "sunny day", internally it will be split into + /// the individual tokens "sunny" and "day". This enables full-text searches for + /// these terms. Fields of type Edm.String or Collection(Edm.String) are searchable + /// by default. This property must be false for simple fields of other non-string + /// data types, and it must be null for complex fields. Note: searchable fields + /// consume extra space in your index to accommodate additional tokenized versions + /// of the field value for full-text searches. If you want to save space in your + /// index and you don't need a field to be included in searches, set searchable to + /// false. + /// + public bool? Searchable { get; set; } + /// + /// A value indicating whether to enable the field to be referenced in $filter + /// queries. filterable differs from searchable in how strings are handled. Fields + /// of type Edm.String or Collection(Edm.String) that are filterable do not undergo + /// word-breaking, so comparisons are for exact matches only. For example, if you + /// set such a field f to "sunny day", $filter=f eq 'sunny' will find no matches, + /// but $filter=f eq 'sunny day' will. This property must be null for complex + /// fields. Default is true for simple fields and null for complex fields. + /// + public bool? Filterable { get; set; } + /// + /// A value indicating whether to enable the field to be referenced in $orderby + /// expressions. By default, the search engine sorts results by score, but in many + /// experiences users will want to sort by fields in the documents. A simple field + /// can be sortable only if it is single-valued (it has a single value in the scope + /// of the parent document). Simple collection fields cannot be sortable, since + /// they are multi-valued. Simple sub-fields of complex collections are also + /// multi-valued, and therefore cannot be sortable. This is true whether it's an + /// immediate parent field, or an ancestor field, that's the complex collection. + /// Complex fields cannot be sortable and the sortable property must be null for + /// such fields. The default for sortable is true for single-valued simple fields, + /// false for multi-valued simple fields, and null for complex fields. + /// + public bool? Sortable { get; set; } + /// + /// A value indicating whether to enable the field to be referenced in facet + /// queries. 
Typically used in a presentation of search results that includes hit + /// count by category (for example, search for digital cameras and see hits by + /// brand, by megapixels, by price, and so on). This property must be null for + /// complex fields. Fields of type Edm.GeographyPoint or + /// Collection(Edm.GeographyPoint) cannot be facetable. Default is true for all + /// other simple fields. + /// + public bool? Facetable { get; set; } + /// + /// The name of the analyzer to use for the field. This option can be used only + /// with searchable fields and it can't be set together with either searchAnalyzer + /// or indexAnalyzer. Once the analyzer is chosen, it cannot be changed for the + /// field. Must be null for complex fields. + /// + public LexicalAnalyzerName? Analyzer { get; set; } + /// + /// The name of the analyzer used at search time for the field. This option can be + /// used only with searchable fields. It must be set together with indexAnalyzer + /// and it cannot be set together with the analyzer option. This property cannot be + /// set to the name of a language analyzer; use the analyzer property instead if + /// you need a language analyzer. This analyzer can be updated on an existing + /// field. Must be null for complex fields. + /// + public LexicalAnalyzerName? SearchAnalyzer { get; set; } + /// + /// The name of the analyzer used at indexing time for the field. This option can + /// be used only with searchable fields. It must be set together with + /// searchAnalyzer and it cannot be set together with the analyzer option. This + /// property cannot be set to the name of a language analyzer; use the analyzer + /// property instead if you need a language analyzer. Once the analyzer is chosen, + /// it cannot be changed for the field. Must be null for complex fields. + /// + public LexicalAnalyzerName? IndexAnalyzer { get; set; } + /// + /// The name of the normalizer to use for the field. This option can be used only + /// with fields with filterable, sortable, or facetable enabled. Once the + /// normalizer is chosen, it cannot be changed for the field. Must be null for + /// complex fields. + /// + public LexicalNormalizerName? Normalizer { get; set; } + /// The dimensionality of the vector field. + public int? VectorSearchDimensions { get; set; } + /// + /// The name of the vector search profile that specifies the algorithm and + /// vectorizer to use when searching the vector field. + /// + public string VectorSearchProfileName { get; set; } + /// The encoding format to interpret the field contents. + public VectorEncodingFormat? VectorEncodingFormat { get; set; } + /// + /// A list of the names of synonym maps to associate with this field. This option + /// can be used only with searchable fields. Currently only one synonym map per + /// field is supported. Assigning a synonym map to a field ensures that query terms + /// targeting that field are expanded at query-time using the rules in the synonym + /// map. This attribute can be changed on existing fields. Must be null or an empty + /// collection for complex fields. + /// + public IList SynonymMaps { get; } + /// + /// A list of sub-fields if this is a field of type Edm.ComplexType or + /// Collection(Edm.ComplexType). Must be null or empty for simple fields. 
+ /// + public IList Fields { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchFieldDataType.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchFieldDataType.cs new file mode 100644 index 000000000000..03acb5ca7773 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchFieldDataType.cs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Defines the data type of a field in a search index. + public readonly partial struct SearchFieldDataType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public SearchFieldDataType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string StringValue = "Edm.String"; + private const string Int32Value = "Edm.Int32"; + private const string Int64Value = "Edm.Int64"; + private const string DoubleValue = "Edm.Double"; + private const string BooleanValue = "Edm.Boolean"; + private const string DateTimeOffsetValue = "Edm.DateTimeOffset"; + private const string GeographyPointValue = "Edm.GeographyPoint"; + private const string ComplexValue = "Edm.ComplexType"; + private const string SingleValue = "Edm.Single"; + private const string HalfValue = "Edm.Half"; + private const string Int16Value = "Edm.Int16"; + private const string SByteValue = "Edm.SByte"; + private const string ByteValue = "Edm.Byte"; + + /// Indicates that a field contains a string. + public static SearchFieldDataType String { get; } = new SearchFieldDataType(StringValue); + /// Indicates that a field contains a 32-bit signed integer. + public static SearchFieldDataType Int32 { get; } = new SearchFieldDataType(Int32Value); + /// Indicates that a field contains a 64-bit signed integer. + public static SearchFieldDataType Int64 { get; } = new SearchFieldDataType(Int64Value); + /// Indicates that a field contains an IEEE double-precision floating point number. + public static SearchFieldDataType Double { get; } = new SearchFieldDataType(DoubleValue); + /// Indicates that a field contains a Boolean value (true or false). + public static SearchFieldDataType Boolean { get; } = new SearchFieldDataType(BooleanValue); + /// + /// Indicates that a field contains a date/time value, including timezone + /// information. + /// + public static SearchFieldDataType DateTimeOffset { get; } = new SearchFieldDataType(DateTimeOffsetValue); + /// + /// Indicates that a field contains a geo-location in terms of longitude and + /// latitude. + /// + public static SearchFieldDataType GeographyPoint { get; } = new SearchFieldDataType(GeographyPointValue); + /// + /// Indicates that a field contains one or more complex objects that in turn have + /// sub-fields of other types. + /// + public static SearchFieldDataType Complex { get; } = new SearchFieldDataType(ComplexValue); + /// + /// Indicates that a field contains a single-precision floating point number. This + /// is only valid when used with Collection(Edm.Single). + /// + public static SearchFieldDataType Single { get; } = new SearchFieldDataType(SingleValue); + /// + /// Indicates that a field contains a half-precision floating point number. This is + /// only valid when used with Collection(Edm.Half). 
+ /// + public static SearchFieldDataType Half { get; } = new SearchFieldDataType(HalfValue); + /// + /// Indicates that a field contains a 16-bit signed integer. This is only valid + /// when used with Collection(Edm.Int16). + /// + public static SearchFieldDataType Int16 { get; } = new SearchFieldDataType(Int16Value); + /// + /// Indicates that a field contains a 8-bit signed integer. This is only valid when + /// used with Collection(Edm.SByte). + /// + public static SearchFieldDataType SByte { get; } = new SearchFieldDataType(SByteValue); + /// + /// Indicates that a field contains a 8-bit unsigned integer. This is only valid + /// when used with Collection(Edm.Byte). + /// + public static SearchFieldDataType Byte { get; } = new SearchFieldDataType(ByteValue); + /// Determines if two values are the same. + public static bool operator ==(SearchFieldDataType left, SearchFieldDataType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(SearchFieldDataType left, SearchFieldDataType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator SearchFieldDataType(string value) => new SearchFieldDataType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is SearchFieldDataType other && Equals(other); + /// + public bool Equals(SearchFieldDataType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndex.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndex.Serialization.cs new file mode 100644 index 000000000000..28b7e7c8e9a0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndex.Serialization.cs @@ -0,0 +1,449 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndex : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndex)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("fields"u8); + writer.WriteStartArray(); + foreach (var item in Fields) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsCollectionDefined(ScoringProfiles)) + { + writer.WritePropertyName("scoringProfiles"u8); + writer.WriteStartArray(); + foreach (var item in ScoringProfiles) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(DefaultScoringProfile)) + { + writer.WritePropertyName("defaultScoringProfile"u8); + writer.WriteStringValue(DefaultScoringProfile); + } + if (Optional.IsDefined(CorsOptions)) + { + writer.WritePropertyName("corsOptions"u8); + writer.WriteObjectValue(CorsOptions, options); + } + if (Optional.IsCollectionDefined(Suggesters)) + { + writer.WritePropertyName("suggesters"u8); + writer.WriteStartArray(); + foreach (var item in Suggesters) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Analyzers)) + { + writer.WritePropertyName("analyzers"u8); + writer.WriteStartArray(); + foreach (var item in Analyzers) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Tokenizers)) + { + writer.WritePropertyName("tokenizers"u8); + writer.WriteStartArray(); + foreach (var item in Tokenizers) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(TokenFilters)) + { + writer.WritePropertyName("tokenFilters"u8); + writer.WriteStartArray(); + foreach (var item in TokenFilters) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(CharFilters)) + { + writer.WritePropertyName("charFilters"u8); + writer.WriteStartArray(); + foreach (var item in CharFilters) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Normalizers)) + { + writer.WritePropertyName("normalizers"u8); + writer.WriteStartArray(); + foreach (var item in Normalizers) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(EncryptionKey)) + { + writer.WritePropertyName("encryptionKey"u8); + writer.WriteObjectValue(EncryptionKey, options); + } + if (Optional.IsDefined(Similarity)) + { + writer.WritePropertyName("similarity"u8); + writer.WriteObjectValue(Similarity, options); + } + if (Optional.IsDefined(SemanticSearch)) + { + writer.WritePropertyName("semantic"u8); + writer.WriteObjectValue(SemanticSearch, options); + } + if (Optional.IsDefined(VectorSearch)) + { + writer.WritePropertyName("vectorSearch"u8); + writer.WriteObjectValue(VectorSearch, options); + } + if (Optional.IsDefined(ETag)) + { + writer.WritePropertyName("@odata.etag"u8); + writer.WriteStringValue(ETag); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, 
document.RootElement); + } +#endif + } + } + } + + SearchIndex IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndex)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndex(document.RootElement, options); + } + + internal static SearchIndex DeserializeSearchIndex(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + IList fields = default; + IList scoringProfiles = default; + string defaultScoringProfile = default; + CorsOptions corsOptions = default; + IList suggesters = default; + IList analyzers = default; + IList tokenizers = default; + IList tokenFilters = default; + IList charFilters = default; + IList normalizers = default; + SearchResourceEncryptionKey encryptionKey = default; + SimilarityAlgorithm similarity = default; + SemanticSearch semantic = default; + VectorSearch vectorSearch = default; + string odataEtag = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("fields"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchField.DeserializeSearchField(item, options)); + } + fields = array; + continue; + } + if (property.NameEquals("scoringProfiles"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(ScoringProfile.DeserializeScoringProfile(item, options)); + } + scoringProfiles = array; + continue; + } + if (property.NameEquals("defaultScoringProfile"u8)) + { + defaultScoringProfile = property.Value.GetString(); + continue; + } + if (property.NameEquals("corsOptions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + corsOptions = CorsOptions.DeserializeCorsOptions(property.Value, options); + continue; + } + if (property.NameEquals("suggesters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchSuggester.DeserializeSearchSuggester(item, options)); + } + suggesters = array; + continue; + } + if (property.NameEquals("analyzers"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(LexicalAnalyzer.DeserializeLexicalAnalyzer(item, options)); + } + analyzers = array; + continue; + } + if (property.NameEquals("tokenizers"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(LexicalTokenizer.DeserializeLexicalTokenizer(item, options)); + } + tokenizers = array; + continue; + } + if (property.NameEquals("tokenFilters"u8)) + { + if (property.Value.ValueKind == 
JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(TokenFilter.DeserializeTokenFilter(item, options)); + } + tokenFilters = array; + continue; + } + if (property.NameEquals("charFilters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(CharFilter.DeserializeCharFilter(item, options)); + } + charFilters = array; + continue; + } + if (property.NameEquals("normalizers"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(LexicalNormalizer.DeserializeLexicalNormalizer(item, options)); + } + normalizers = array; + continue; + } + if (property.NameEquals("encryptionKey"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value, options); + continue; + } + if (property.NameEquals("similarity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + similarity = SimilarityAlgorithm.DeserializeSimilarityAlgorithm(property.Value, options); + continue; + } + if (property.NameEquals("semantic"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + semantic = SemanticSearch.DeserializeSemanticSearch(property.Value, options); + continue; + } + if (property.NameEquals("vectorSearch"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + vectorSearch = VectorSearch.DeserializeVectorSearch(property.Value, options); + continue; + } + if (property.NameEquals("@odata.etag"u8)) + { + odataEtag = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndex( + name, + fields, + scoringProfiles ?? new ChangeTrackingList(), + defaultScoringProfile, + corsOptions, + suggesters ?? new ChangeTrackingList(), + analyzers ?? new ChangeTrackingList(), + tokenizers ?? new ChangeTrackingList(), + tokenFilters ?? new ChangeTrackingList(), + charFilters ?? new ChangeTrackingList(), + normalizers ?? new ChangeTrackingList(), + encryptionKey, + similarity, + semantic, + vectorSearch, + odataEtag, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndex)} does not support writing '{options.Format}' format."); + } + } + + SearchIndex IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndex(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndex)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndex FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndex(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndex.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndex.cs new file mode 100644 index 000000000000..06964fc0d73d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndex.cs @@ -0,0 +1,232 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// Represents a search index definition, which describes the fields and search + /// behavior of an index. + /// + public partial class SearchIndex + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the index. + /// The fields of the index. + /// or is null. + public SearchIndex(string name, IEnumerable fields) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(fields, nameof(fields)); + + Name = name; + Fields = fields.ToList(); + ScoringProfiles = new ChangeTrackingList(); + Suggesters = new ChangeTrackingList(); + Analyzers = new ChangeTrackingList(); + Tokenizers = new ChangeTrackingList(); + TokenFilters = new ChangeTrackingList(); + CharFilters = new ChangeTrackingList(); + Normalizers = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The name of the index. + /// The fields of the index. + /// The scoring profiles for the index. + /// + /// The name of the scoring profile to use if none is specified in the query. If + /// this property is not set and no scoring profile is specified in the query, then + /// default scoring (tf-idf) will be used. 
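The IJsonModel/IPersistableModel plumbing shown above makes the model usable with System.ClientModel's ModelReaderWriter. A minimal round-trip sketch (not part of the generated diff), assuming an existing SearchIndex instance named index:

    using System.ClientModel.Primitives;

    BinaryData json = ModelReaderWriter.Write(index);                     // JSON ("J") is the only supported format
    SearchIndex roundTripped = ModelReaderWriter.Read<SearchIndex>(json); // re-hydrates the definition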
+ /// + /// Options to control Cross-Origin Resource Sharing (CORS) for the index. + /// The suggesters for the index. + /// + /// The analyzers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + /// + /// The tokenizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , and . + /// + /// + /// The token filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , and . + /// + /// + /// The character filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The normalizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include . + /// + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your data when + /// you want full assurance that no one, not even Microsoft, can decrypt your data. + /// Once you have encrypted your data, it will always remain encrypted. The search + /// service will ignore attempts to set this property to null. You can change this + /// property as needed if you want to rotate your encryption key; Your data will be + /// unaffected. Encryption with customer-managed keys is not available for free + /// search services, and is only available for paid services created on or after + /// January 1, 2019. + /// + /// + /// The type of similarity algorithm to be used when scoring and ranking the + /// documents matching a search query. The similarity algorithm can only be defined + /// at index creation time and cannot be modified on existing indexes. If null, the + /// ClassicSimilarity algorithm is used. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// Defines parameters for a search index that influence semantic capabilities. + /// Contains configuration options related to vector search. + /// The ETag of the index. + /// Keeps track of any properties unknown to the library. 
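As a hedged illustration of the similarity parameter described above (not part of the generated diff): the property accepts any derived SimilarityAlgorithm, and BM25Similarity with its K1/B tuning knobs is assumed from the existing public model set.

    index.Similarity = new BM25Similarity { K1 = 1.2, B = 0.75 };  // can only be set at index creation time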
+ internal SearchIndex(string name, IList fields, IList scoringProfiles, string defaultScoringProfile, CorsOptions corsOptions, IList suggesters, IList analyzers, IList tokenizers, IList tokenFilters, IList charFilters, IList normalizers, SearchResourceEncryptionKey encryptionKey, SimilarityAlgorithm similarity, SemanticSearch semanticSearch, VectorSearch vectorSearch, string eTag, IDictionary serializedAdditionalRawData) + { + Name = name; + Fields = fields; + ScoringProfiles = scoringProfiles; + DefaultScoringProfile = defaultScoringProfile; + CorsOptions = corsOptions; + Suggesters = suggesters; + Analyzers = analyzers; + Tokenizers = tokenizers; + TokenFilters = tokenFilters; + CharFilters = charFilters; + Normalizers = normalizers; + EncryptionKey = encryptionKey; + Similarity = similarity; + SemanticSearch = semanticSearch; + VectorSearch = vectorSearch; + ETag = eTag; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndex() + { + } + + /// The name of the index. + public string Name { get; set; } + /// The fields of the index. + public IList Fields { get; } + /// The scoring profiles for the index. + public IList ScoringProfiles { get; } + /// + /// The name of the scoring profile to use if none is specified in the query. If + /// this property is not set and no scoring profile is specified in the query, then + /// default scoring (tf-idf) will be used. + /// + public string DefaultScoringProfile { get; set; } + /// Options to control Cross-Origin Resource Sharing (CORS) for the index. + public CorsOptions CorsOptions { get; set; } + /// The suggesters for the index. + public IList Suggesters { get; } + /// + /// The analyzers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public IList Analyzers { get; } + /// + /// The tokenizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , and . + /// + public IList Tokenizers { get; } + /// + /// The token filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , and . + /// + public IList TokenFilters { get; } + /// + /// The character filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public IList CharFilters { get; } + /// + /// The normalizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include . 
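The analyzer, tokenizer, token filter, char filter, and normalizer collections are get-only, so entries are added rather than assigned. A sketch (not part of the generated diff); CustomAnalyzer and LexicalTokenizerName.Standard are assumed from the existing public Indexes.Models surface.

    index.Analyzers.Add(new CustomAnalyzer("tag-analyzer", LexicalTokenizerName.Standard));
    // The same Add pattern applies to index.Tokenizers, TokenFilters, CharFilters, and Normalizers.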
+ /// + public IList Normalizers { get; } + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your data when + /// you want full assurance that no one, not even Microsoft, can decrypt your data. + /// Once you have encrypted your data, it will always remain encrypted. The search + /// service will ignore attempts to set this property to null. You can change this + /// property as needed if you want to rotate your encryption key; Your data will be + /// unaffected. Encryption with customer-managed keys is not available for free + /// search services, and is only available for paid services created on or after + /// January 1, 2019. + /// + public SearchResourceEncryptionKey EncryptionKey { get; set; } + /// + /// The type of similarity algorithm to be used when scoring and ranking the + /// documents matching a search query. The similarity algorithm can only be defined + /// at index creation time and cannot be modified on existing indexes. If null, the + /// ClassicSimilarity algorithm is used. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SimilarityAlgorithm Similarity { get; set; } + /// Defines parameters for a search index that influence semantic capabilities. + public SemanticSearch SemanticSearch { get; set; } + /// Contains configuration options related to vector search. + public VectorSearch VectorSearch { get; set; } + /// The ETag of the index. + public string ETag { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexStatistics.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexStatistics.Serialization.cs new file mode 100644 index 000000000000..89ce05dc3d54 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexStatistics.Serialization.cs @@ -0,0 +1,158 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class SearchIndexStatistics : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexStatistics)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("documentCount"u8); + writer.WriteNumberValue(DocumentCount); + writer.WritePropertyName("storageSize"u8); + writer.WriteNumberValue(StorageSize); + writer.WritePropertyName("vectorIndexSize"u8); + writer.WriteNumberValue(VectorIndexSize); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.SearchIndexStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexStatistics)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.SearchIndexStatistics.DeserializeSearchIndexStatistics(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.SearchIndexStatistics DeserializeSearchIndexStatistics(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + long documentCount = default; + long storageSize = default; + long vectorIndexSize = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("documentCount"u8)) + { + documentCount = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("storageSize"u8)) + { + storageSize = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("vectorIndexSize"u8)) + { + vectorIndexSize = property.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.SearchIndexStatistics(documentCount, storageSize, vectorIndexSize, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexStatistics)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.SearchIndexStatistics IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SearchIndexStatistics.DeserializeSearchIndexStatistics(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexStatistics)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static Search.Documents.Indexes.Models.SearchIndexStatistics FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SearchIndexStatistics.DeserializeSearchIndexStatistics(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexStatistics.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexStatistics.cs new file mode 100644 index 000000000000..a852e2385e6f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexStatistics.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Statistics for a given index. Statistics are collected periodically and are not + /// guaranteed to always be up-to-date. + /// + public partial class SearchIndexStatistics + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The number of documents in the index. + /// The amount of storage in bytes consumed by the index. + /// The amount of memory in bytes consumed by vectors in the index. + internal SearchIndexStatistics(long documentCount, long storageSize, long vectorIndexSize) + { + DocumentCount = documentCount; + StorageSize = storageSize; + VectorIndexSize = vectorIndexSize; + } + + /// Initializes a new instance of . + /// The number of documents in the index. + /// The amount of storage in bytes consumed by the index. + /// The amount of memory in bytes consumed by vectors in the index. + /// Keeps track of any properties unknown to the library. 
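Since the constructors are internal and the properties are read-only, SearchIndexStatistics instances normally come back from the service; for local testing they can be rehydrated from JSON matching the serializer above. A sketch (not part of the generated diff):

    using System.ClientModel.Primitives;

    var payload = BinaryData.FromString(
        "{\"documentCount\":42,\"storageSize\":123456,\"vectorIndexSize\":2048}");
    SearchIndexStatistics stats = ModelReaderWriter.Read<SearchIndexStatistics>(payload);
    // stats.DocumentCount == 42, stats.StorageSize == 123456, stats.VectorIndexSize == 2048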
+ internal SearchIndexStatistics(long documentCount, long storageSize, long vectorIndexSize, IDictionary serializedAdditionalRawData) + { + DocumentCount = documentCount; + StorageSize = storageSize; + VectorIndexSize = vectorIndexSize; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexStatistics() + { + } + + /// The number of documents in the index. + public long DocumentCount { get; } + /// The amount of storage in bytes consumed by the index. + public long StorageSize { get; } + /// The amount of memory in bytes consumed by vectors in the index. + public long VectorIndexSize { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexer.Serialization.cs new file mode 100644 index 000000000000..cc0172e531f8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexer.Serialization.cs @@ -0,0 +1,330 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexer)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + writer.WritePropertyName("dataSourceName"u8); + writer.WriteStringValue(DataSourceName); + if (Optional.IsDefined(SkillsetName)) + { + writer.WritePropertyName("skillsetName"u8); + writer.WriteStringValue(SkillsetName); + } + writer.WritePropertyName("targetIndexName"u8); + writer.WriteStringValue(TargetIndexName); + if (Optional.IsDefined(Schedule)) + { + writer.WritePropertyName("schedule"u8); + writer.WriteObjectValue(Schedule, options); + } + if (Optional.IsDefined(Parameters)) + { + writer.WritePropertyName("parameters"u8); + writer.WriteObjectValue(Parameters, options); + } + if (Optional.IsCollectionDefined(FieldMappings)) + { + writer.WritePropertyName("fieldMappings"u8); + writer.WriteStartArray(); + foreach (var item in FieldMappings) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(OutputFieldMappings)) + { + writer.WritePropertyName("outputFieldMappings"u8); + writer.WriteStartArray(); + foreach (var item in OutputFieldMappings) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(IsDisabled)) + { + writer.WritePropertyName("disabled"u8); + writer.WriteBooleanValue(IsDisabled.Value); + } + if (Optional.IsDefined(ETag)) + { + writer.WritePropertyName("@odata.etag"u8); + writer.WriteStringValue(ETag); + } + if (Optional.IsDefined(EncryptionKey)) + { + writer.WritePropertyName("encryptionKey"u8); + writer.WriteObjectValue(EncryptionKey, options); + } + if (Optional.IsDefined(Cache)) + { + writer.WritePropertyName("cache"u8); + writer.WriteObjectValue(Cache, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexer(document.RootElement, options); + } + + internal static SearchIndexer DeserializeSearchIndexer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string description = default; + string dataSourceName = default; + string skillsetName = default; + string targetIndexName = default; + IndexingSchedule schedule = default; + IndexingParameters parameters = default; + IList fieldMappings = default; + IList outputFieldMappings = default; + bool? 
disabled = default; + string odataEtag = default; + SearchResourceEncryptionKey encryptionKey = default; + SearchIndexerCache cache = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("dataSourceName"u8)) + { + dataSourceName = property.Value.GetString(); + continue; + } + if (property.NameEquals("skillsetName"u8)) + { + skillsetName = property.Value.GetString(); + continue; + } + if (property.NameEquals("targetIndexName"u8)) + { + targetIndexName = property.Value.GetString(); + continue; + } + if (property.NameEquals("schedule"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + schedule = IndexingSchedule.DeserializeIndexingSchedule(property.Value, options); + continue; + } + if (property.NameEquals("parameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + parameters = IndexingParameters.DeserializeIndexingParameters(property.Value, options); + continue; + } + if (property.NameEquals("fieldMappings"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(FieldMapping.DeserializeFieldMapping(item, options)); + } + fieldMappings = array; + continue; + } + if (property.NameEquals("outputFieldMappings"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(FieldMapping.DeserializeFieldMapping(item, options)); + } + outputFieldMappings = array; + continue; + } + if (property.NameEquals("disabled"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + disabled = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.etag"u8)) + { + odataEtag = property.Value.GetString(); + continue; + } + if (property.NameEquals("encryptionKey"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value, options); + continue; + } + if (property.NameEquals("cache"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + cache = SearchIndexerCache.DeserializeSearchIndexerCache(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexer( + name, + description, + dataSourceName, + skillsetName, + targetIndexName, + schedule, + parameters, + fieldMappings ?? new ChangeTrackingList(), + outputFieldMappings ?? new ChangeTrackingList(), + disabled, + odataEtag, + encryptionKey, + cache, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexer)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexer(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexer.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexer.cs new file mode 100644 index 000000000000..30403374c790 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexer.cs @@ -0,0 +1,171 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents an indexer. + public partial class SearchIndexer + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the indexer. + /// The name of the datasource from which this indexer reads data. + /// The name of the index to which this indexer writes data. + /// , or is null. 
+ public SearchIndexer(string name, string dataSourceName, string targetIndexName) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(dataSourceName, nameof(dataSourceName)); + Argument.AssertNotNull(targetIndexName, nameof(targetIndexName)); + + Name = name; + DataSourceName = dataSourceName; + TargetIndexName = targetIndexName; + FieldMappings = new ChangeTrackingList(); + OutputFieldMappings = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The name of the indexer. + /// The description of the indexer. + /// The name of the datasource from which this indexer reads data. + /// The name of the skillset executing with this indexer. + /// The name of the index to which this indexer writes data. + /// The schedule for this indexer. + /// Parameters for indexer execution. + /// + /// Defines mappings between fields in the data source and corresponding target + /// fields in the index. + /// + /// + /// Output field mappings are applied after enrichment and immediately before + /// indexing. + /// + /// A value indicating whether the indexer is disabled. Default is false. + /// The ETag of the indexer. + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your indexer + /// definition (as well as indexer execution status) when you want full assurance + /// that no one, not even Microsoft, can decrypt them. Once you have encrypted your + /// indexer definition, it will always remain encrypted. The search service will + /// ignore attempts to set this property to null. You can change this property as + /// needed if you want to rotate your encryption key; Your indexer definition (and + /// indexer execution status) will be unaffected. Encryption with customer-managed + /// keys is not available for free search services, and is only available for paid + /// services created on or after January 1, 2019. + /// + /// + /// Adds caching to an enrichment pipeline to allow for incremental modification + /// steps without having to rebuild the index every time. + /// + /// Keeps track of any properties unknown to the library. + internal SearchIndexer(string name, string description, string dataSourceName, string skillsetName, string targetIndexName, IndexingSchedule schedule, IndexingParameters parameters, IList fieldMappings, IList outputFieldMappings, bool? isDisabled, string eTag, SearchResourceEncryptionKey encryptionKey, SearchIndexerCache cache, IDictionary serializedAdditionalRawData) + { + Name = name; + Description = description; + DataSourceName = dataSourceName; + SkillsetName = skillsetName; + TargetIndexName = targetIndexName; + Schedule = schedule; + Parameters = parameters; + FieldMappings = fieldMappings; + OutputFieldMappings = outputFieldMappings; + IsDisabled = isDisabled; + ETag = eTag; + EncryptionKey = encryptionKey; + Cache = cache; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexer() + { + } + + /// The name of the indexer. + public string Name { get; set; } + /// The description of the indexer. + public string Description { get; set; } + /// The name of the datasource from which this indexer reads data. + public string DataSourceName { get; set; } + /// The name of the skillset executing with this indexer. + public string SkillsetName { get; set; } + /// The name of the index to which this indexer writes data. 
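A usage sketch for the constructor above (not part of the generated diff). Only name, dataSourceName, and targetIndexName are required; IndexingSchedule(TimeSpan) and FieldMapping(sourceFieldName) with a settable TargetFieldName are assumed from the existing public surface.

    var indexer = new SearchIndexer("hotels-indexer", "hotels-datasource", "hotels-index")
    {
        Description = "Nightly sync of the hotels data source",
        Schedule = new IndexingSchedule(TimeSpan.FromHours(24)),
        IsDisabled = false,
    };
    indexer.FieldMappings.Add(new FieldMapping("hotel_id") { TargetFieldName = "HotelId" });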
+ public string TargetIndexName { get; set; } + /// The schedule for this indexer. + public IndexingSchedule Schedule { get; set; } + /// Parameters for indexer execution. + public IndexingParameters Parameters { get; set; } + /// + /// Defines mappings between fields in the data source and corresponding target + /// fields in the index. + /// + public IList FieldMappings { get; } + /// + /// Output field mappings are applied after enrichment and immediately before + /// indexing. + /// + public IList OutputFieldMappings { get; } + /// A value indicating whether the indexer is disabled. Default is false. + public bool? IsDisabled { get; set; } + /// The ETag of the indexer. + public string ETag { get; set; } + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your indexer + /// definition (as well as indexer execution status) when you want full assurance + /// that no one, not even Microsoft, can decrypt them. Once you have encrypted your + /// indexer definition, it will always remain encrypted. The search service will + /// ignore attempts to set this property to null. You can change this property as + /// needed if you want to rotate your encryption key; Your indexer definition (and + /// indexer execution status) will be unaffected. Encryption with customer-managed + /// keys is not available for free search services, and is only available for paid + /// services created on or after January 1, 2019. + /// + public SearchResourceEncryptionKey EncryptionKey { get; set; } + /// + /// Adds caching to an enrichment pipeline to allow for incremental modification + /// steps without having to rebuild the index every time. + /// + public SearchIndexerCache Cache { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerCache.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerCache.Serialization.cs new file mode 100644 index 000000000000..1d1852692616 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerCache.Serialization.cs @@ -0,0 +1,186 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerCache : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerCache)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(StorageConnectionString)) + { + writer.WritePropertyName("storageConnectionString"u8); + writer.WriteStringValue(StorageConnectionString); + } + if (Optional.IsDefined(EnableReprocessing)) + { + writer.WritePropertyName("enableReprocessing"u8); + writer.WriteBooleanValue(EnableReprocessing.Value); + } + if (Optional.IsDefined(Identity)) + { + writer.WritePropertyName("identity"u8); + writer.WriteObjectValue(Identity, options); + } + if (Optional.IsDefined(Id)) + { + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerCache IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerCache)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerCache(document.RootElement, options); + } + + internal static SearchIndexerCache DeserializeSearchIndexerCache(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string storageConnectionString = default; + bool? enableReprocessing = default; + SearchIndexerDataIdentity identity = default; + string id = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("storageConnectionString"u8)) + { + storageConnectionString = property.Value.GetString(); + continue; + } + if (property.NameEquals("enableReprocessing"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableReprocessing = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("identity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (property.NameEquals("id"u8)) + { + id = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerCache(storageConnectionString, enableReprocessing, identity, id, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerCache)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerCache IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerCache(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerCache)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerCache FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerCache(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerCache.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerCache.cs new file mode 100644 index 000000000000..4142fe925a0d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerCache.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// The type of the cache. + public partial class SearchIndexerCache + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public SearchIndexerCache() + { + } + + /// Initializes a new instance of . + /// + /// The connection string to the storage account where the cache data will be + /// persisted. + /// + /// Specifies whether incremental reprocessing is enabled. + /// + /// The user-assigned managed identity used for connections to the enrichment + /// cache. If the connection string indicates an identity (ResourceId) and it's + /// not specified, the system-assigned managed identity is used. On updates to the + /// indexer, if the identity is unspecified, the value remains unchanged. 
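A sketch of attaching the cache to an indexer (not part of the generated diff). The connection string is a placeholder, and indexer refers to the SearchIndexer from the earlier sketch; enabling the cache allows incremental enrichment instead of full rebuilds, as described above.

    var cache = new SearchIndexerCache
    {
        StorageConnectionString = "<storage-connection-string>",  // placeholder value
        EnableReprocessing = true,
    };
    indexer.Cache = cache;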
If set to + /// "none", the value of this property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// A guid for the SearchIndexerCache. + /// Keeps track of any properties unknown to the library. + internal SearchIndexerCache(string storageConnectionString, bool? enableReprocessing, SearchIndexerDataIdentity identity, string id, IDictionary serializedAdditionalRawData) + { + StorageConnectionString = storageConnectionString; + EnableReprocessing = enableReprocessing; + Identity = identity; + Id = id; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The connection string to the storage account where the cache data will be + /// persisted. + /// + public string StorageConnectionString { get; set; } + /// Specifies whether incremental reprocessing is enabled. + public bool? EnableReprocessing { get; set; } + /// + /// The user-assigned managed identity used for connections to the enrichment + /// cache. If the connection string indicates an identity (ResourceId) and it's + /// not specified, the system-assigned managed identity is used. On updates to the + /// indexer, if the identity is unspecified, the value remains unchanged. If set to + /// "none", the value of this property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity Identity { get; set; } + /// A guid for the SearchIndexerCache. + public string Id { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataContainer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataContainer.Serialization.cs new file mode 100644 index 000000000000..ffcb8696e9a1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataContainer.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerDataContainer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
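A hedged usage sketch for the SearchIndexerCache model defined above: enabling incremental enrichment caching with an optional user-assigned identity. The SearchIndexer type and its Cache property are assumed from the broader Azure.Search.Documents library and are not part of this diff; the resource ID reuses the example form from the doc comments.

    // Sketch only: configure an enrichment cache as described by the generated model above.
    var cache = new SearchIndexerCache
    {
        StorageConnectionString = "<storage-connection-string>",
        EnableReprocessing = true,
        // Assumption: a user-assigned identity overrides the key in the connection string;
        // per the doc comment, SearchIndexerDataNoneIdentity clears the identity instead.
        Identity = new SearchIndexerDataUserAssignedIdentity(
            "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId")
    };
    // Assumed from the hand-written client surface, not from this generated code:
    // indexer.Cache = cache;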
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataContainer)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(Query)) + { + writer.WritePropertyName("query"u8); + writer.WriteStringValue(Query); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerDataContainer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataContainer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerDataContainer(document.RootElement, options); + } + + internal static SearchIndexerDataContainer DeserializeSearchIndexerDataContainer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string query = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("query"u8)) + { + query = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerDataContainer(name, query, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerDataContainer)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerDataContainer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataContainer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerDataContainer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. 
+ /// The response to deserialize the model from. + internal static SearchIndexerDataContainer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataContainer(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataContainer.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataContainer.cs new file mode 100644 index 000000000000..b2a8fbbec5c7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataContainer.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Represents information about the entity (such as Azure SQL table or CosmosDB + /// collection) that will be indexed. + /// + public partial class SearchIndexerDataContainer + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the table or view (for Azure SQL data source) or collection (for + /// CosmosDB data source) that will be indexed. + /// + /// is null. + public SearchIndexerDataContainer(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// + /// The name of the table or view (for Azure SQL data source) or collection (for + /// CosmosDB data source) that will be indexed. + /// + /// + /// A query that is applied to this data container. The syntax and meaning of this + /// parameter is datasource-specific. Not supported by Azure SQL datasources. + /// + /// Keeps track of any properties unknown to the library. + internal SearchIndexerDataContainer(string name, string query, IDictionary serializedAdditionalRawData) + { + Name = name; + Query = query; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerDataContainer() + { + } + + /// + /// The name of the table or view (for Azure SQL data source) or collection (for + /// CosmosDB data source) that will be indexed. + /// + public string Name { get; set; } + /// + /// A query that is applied to this data container. The syntax and meaning of this + /// parameter is datasource-specific. Not supported by Azure SQL datasources. 
+ /// + public string Query { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataIdentity.Serialization.cs new file mode 100644 index 000000000000..019b85c9c348 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataIdentity.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownSearchIndexerDataIdentity))] + public partial class SearchIndexerDataIdentity : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerDataIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
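Because every generated model in this diff implements the IJsonModel/IPersistableModel interfaces, it can be round-tripped locally through System.ClientModel's ModelReaderWriter. A minimal sketch using SearchIndexerDataContainer, whose public constructor appears above; "J" is the plain JSON format, while the generated code reserves "W" for the service wire format.

    using System;
    using System.ClientModel.Primitives;

    var container = new SearchIndexerDataContainer("hotels-table");

    // Serialize to JSON and read it back without calling the service.
    BinaryData json = ModelReaderWriter.Write(container, ModelReaderWriterOptions.Json);
    SearchIndexerDataContainer roundTripped =
        ModelReaderWriter.Read<SearchIndexerDataContainer>(json, ModelReaderWriterOptions.Json);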
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerDataIdentity(document.RootElement, options); + } + + internal static SearchIndexerDataIdentity DeserializeSearchIndexerDataIdentity(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.DataNoneIdentity": return SearchIndexerDataNoneIdentity.DeserializeSearchIndexerDataNoneIdentity(element, options); + case "#Microsoft.Azure.Search.DataUserAssignedIdentity": return SearchIndexerDataUserAssignedIdentity.DeserializeSearchIndexerDataUserAssignedIdentity(element, options); + } + } + return UnknownSearchIndexerDataIdentity.DeserializeUnknownSearchIndexerDataIdentity(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerDataIdentity IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataIdentity(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerDataIdentity FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataIdentity(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataIdentity.cs new file mode 100644 index 000000000000..6de6fce67d4b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataIdentity.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Abstract base type for data identities. 
+ /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public abstract partial class SearchIndexerDataIdentity + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + protected SearchIndexerDataIdentity() + { + } + + /// Initializes a new instance of . + /// A URI fragment specifying the type of identity. + /// Keeps track of any properties unknown to the library. + internal SearchIndexerDataIdentity(string odataType, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// A URI fragment specifying the type of identity. + internal string OdataType { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataNoneIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataNoneIdentity.Serialization.cs new file mode 100644 index 000000000000..010001c49411 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataNoneIdentity.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerDataNoneIdentity : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataNoneIdentity)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + SearchIndexerDataNoneIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataNoneIdentity)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerDataNoneIdentity(document.RootElement, options); + } + + internal static SearchIndexerDataNoneIdentity DeserializeSearchIndexerDataNoneIdentity(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerDataNoneIdentity(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerDataNoneIdentity)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerDataNoneIdentity IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataNoneIdentity(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerDataNoneIdentity)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SearchIndexerDataNoneIdentity FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataNoneIdentity(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataNoneIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataNoneIdentity.cs new file mode 100644 index 000000000000..cdd7389db57e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataNoneIdentity.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Clears the identity property of a datasource. + public partial class SearchIndexerDataNoneIdentity : SearchIndexerDataIdentity + { + /// Initializes a new instance of . + public SearchIndexerDataNoneIdentity() + { + OdataType = "#Microsoft.Azure.Search.DataNoneIdentity"; + } + + /// Initializes a new instance of . + /// A URI fragment specifying the type of identity. + /// Keeps track of any properties unknown to the library. + internal SearchIndexerDataNoneIdentity(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceConnection.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceConnection.Serialization.cs new file mode 100644 index 000000000000..db0ddcfb52af --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceConnection.Serialization.cs @@ -0,0 +1,259 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class SearchIndexerDataSourceConnection : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
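The "@odata.type" switch above is the discriminator that maps wire JSON onto the concrete identity types. A short sketch, using only constructors shown in this diff, of the values each derived class stamps onto that property:

    // "#Microsoft.Azure.Search.DataNoneIdentity" -> clears the identity on the resource.
    SearchIndexerDataIdentity none = new SearchIndexerDataNoneIdentity();

    // "#Microsoft.Azure.Search.DataUserAssignedIdentity" -> references a user-assigned managed identity.
    SearchIndexerDataIdentity userAssigned = new SearchIndexerDataUserAssignedIdentity(
        "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId");

    // Any unrecognized discriminator falls back to the internal UnknownSearchIndexerDataIdentity.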
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + writer.WritePropertyName("type"u8); + writer.WriteStringValue(Type.ToString()); + writer.WritePropertyName("credentials"u8); + writer.WriteObjectValue(CredentialsInternal, options); + writer.WritePropertyName("container"u8); + writer.WriteObjectValue(Container, options); + if (Optional.IsDefined(Identity)) + { + writer.WritePropertyName("identity"u8); + writer.WriteObjectValue(Identity, options); + } + if (Optional.IsDefined(DataChangeDetectionPolicy)) + { + writer.WritePropertyName("dataChangeDetectionPolicy"u8); + writer.WriteObjectValue(DataChangeDetectionPolicy, options); + } + if (Optional.IsDefined(DataDeletionDetectionPolicy)) + { + writer.WritePropertyName("dataDeletionDetectionPolicy"u8); + writer.WriteObjectValue(DataDeletionDetectionPolicy, options); + } + if (Optional.IsDefined(_etag)) + { + writer.WritePropertyName("@odata.etag"u8); + writer.WriteStringValue(_etag); + } + if (Optional.IsDefined(EncryptionKey)) + { + writer.WritePropertyName("encryptionKey"u8); + writer.WriteObjectValue(EncryptionKey, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection DeserializeSearchIndexerDataSourceConnection(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string description = default; + SearchIndexerDataSourceType type = default; + Search.Documents.DataSourceCredentials credentials = default; + SearchIndexerDataContainer container = default; + Search.Documents.SearchIndexerDataIdentity identity = default; + Search.Documents.DataChangeDetectionPolicy dataChangeDetectionPolicy = default; + Search.Documents.DataDeletionDetectionPolicy dataDeletionDetectionPolicy = default; + string odataEtag = default; + Search.Documents.SearchResourceEncryptionKey encryptionKey = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("type"u8)) + { + type = new SearchIndexerDataSourceType(property.Value.GetString()); + continue; + } + if (property.NameEquals("credentials"u8)) + { + credentials = Search.Documents.DataSourceCredentials.DeserializeDataSourceCredentials(property.Value, options); + continue; + } + if (property.NameEquals("container"u8)) + { + container = SearchIndexerDataContainer.DeserializeSearchIndexerDataContainer(property.Value, options); + continue; + } + if (property.NameEquals("identity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + identity = Search.Documents.SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (property.NameEquals("dataChangeDetectionPolicy"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + dataChangeDetectionPolicy = Search.Documents.DataChangeDetectionPolicy.DeserializeDataChangeDetectionPolicy(property.Value, options); + continue; + } + if (property.NameEquals("dataDeletionDetectionPolicy"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + dataDeletionDetectionPolicy = Search.Documents.DataDeletionDetectionPolicy.DeserializeDataDeletionDetectionPolicy(property.Value, options); + continue; + } + if (property.NameEquals("@odata.etag"u8)) + { + odataEtag = property.Value.GetString(); + continue; + } + if (property.NameEquals("encryptionKey"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encryptionKey = Search.Documents.SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + 
serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection( + name, + description, + type, + credentials, + container, + identity, + dataChangeDetectionPolicy, + dataDeletionDetectionPolicy, + odataEtag, + encryptionKey, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection.DeserializeSearchIndexerDataSourceConnection(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceConnection.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceConnection.cs new file mode 100644 index 000000000000..7be33003cec7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceConnection.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Represents a datasource definition, which can be used to configure an indexer. + public partial class SearchIndexerDataSourceConnection + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the datasource. + /// The type of the datasource. + /// Credentials for the datasource. + /// The data container for the datasource. + /// , or is null. + public SearchIndexerDataSourceConnection(string name, SearchIndexerDataSourceType type, Search.Documents.DataSourceCredentials credentialsInternal, SearchIndexerDataContainer container) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(credentialsInternal, nameof(credentialsInternal)); + Argument.AssertNotNull(container, nameof(container)); + + Name = name; + Type = type; + CredentialsInternal = credentialsInternal; + Container = container; + } + + /// Initializes a new instance of . + /// The name of the datasource. + /// The description of the datasource. + /// The type of the datasource. + /// Credentials for the datasource. + /// The data container for the datasource. + /// + /// An explicit managed identity to use for this datasource. If not specified and + /// the connection string is a managed identity, the system-assigned managed + /// identity is used. If not specified, the value remains unchanged. If "none" is + /// specified, the value of this property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The data change detection policy for the datasource. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The data deletion detection policy for the datasource. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// The ETag of the data source. + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your + /// datasource definition when you want full assurance that no one, not even + /// Microsoft, can decrypt your data source definition. Once you have encrypted + /// your data source definition, it will always remain encrypted. The search + /// service will ignore attempts to set this property to null. You can change this + /// property as needed if you want to rotate your encryption key; Your datasource + /// definition will be unaffected. Encryption with customer-managed keys is not + /// available for free search services, and is only available for paid services + /// created on or after January 1, 2019. + /// + /// Keeps track of any properties unknown to the library. 
+ internal SearchIndexerDataSourceConnection(string name, string description, SearchIndexerDataSourceType type, Search.Documents.DataSourceCredentials credentialsInternal, SearchIndexerDataContainer container, Search.Documents.SearchIndexerDataIdentity identity, Search.Documents.DataChangeDetectionPolicy dataChangeDetectionPolicy, Search.Documents.DataDeletionDetectionPolicy dataDeletionDetectionPolicy, string etag, Search.Documents.SearchResourceEncryptionKey encryptionKey, IDictionary serializedAdditionalRawData) + { + Name = name; + Description = description; + Type = type; + CredentialsInternal = credentialsInternal; + Container = container; + Identity = identity; + DataChangeDetectionPolicy = dataChangeDetectionPolicy; + DataDeletionDetectionPolicy = dataDeletionDetectionPolicy; + _etag = etag; + EncryptionKey = encryptionKey; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerDataSourceConnection() + { + } + + /// The name of the datasource. + public string Name { get; set; } + /// The description of the datasource. + public string Description { get; set; } + /// The type of the datasource. + public SearchIndexerDataSourceType Type { get; set; } + /// The data container for the datasource. + public SearchIndexerDataContainer Container { get; set; } + /// + /// An explicit managed identity to use for this datasource. If not specified and + /// the connection string is a managed identity, the system-assigned managed + /// identity is used. If not specified, the value remains unchanged. If "none" is + /// specified, the value of this property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public Search.Documents.SearchIndexerDataIdentity Identity { get; set; } + /// + /// The data change detection policy for the datasource. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public Search.Documents.DataChangeDetectionPolicy DataChangeDetectionPolicy { get; set; } + /// + /// The data deletion detection policy for the datasource. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public Search.Documents.DataDeletionDetectionPolicy DataDeletionDetectionPolicy { get; set; } + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your + /// datasource definition when you want full assurance that no one, not even + /// Microsoft, can decrypt your data source definition. Once you have encrypted + /// your data source definition, it will always remain encrypted. The search + /// service will ignore attempts to set this property to null. You can change this + /// property as needed if you want to rotate your encryption key; Your datasource + /// definition will be unaffected. 
Encryption with customer-managed keys is not + /// available for free search services, and is only available for paid services + /// created on or after January 1, 2019. + /// + public Search.Documents.SearchResourceEncryptionKey EncryptionKey { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceType.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceType.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceType.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceType.cs index 6bd435e0c790..fcee372eae73 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchIndexerDataSourceType.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataSourceType.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Defines the type of a datasource. public readonly partial struct SearchIndexerDataSourceType : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataUserAssignedIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataUserAssignedIdentity.Serialization.cs new file mode 100644 index 000000000000..b8fa1b216062 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataUserAssignedIdentity.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerDataUserAssignedIdentity : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataUserAssignedIdentity)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("userAssignedIdentity"u8); + writer.WriteStringValue(ResourceId); + } + + SearchIndexerDataUserAssignedIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
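A sketch of building the datasource connection serialized above. The string-based constructor overload and the CreateOrUpdateDataSourceConnection call are assumed from the public, hand-written surface of the library (the generated constructor in this diff takes DataSourceCredentials instead), and the names are illustrative.

    // Sketch only; not the generated constructor shown in this diff.
    var dataSource = new SearchIndexerDataSourceConnection(
        "hotels-sql-ds",                           // name
        SearchIndexerDataSourceType.AzureSql,      // type
        "<azure-sql-connection-string>",           // credentials (public overload takes a string)
        new SearchIndexerDataContainer("Hotels"))  // container: table, view, or collection to index
    {
        Description = "Hotels table indexed by the hotels indexer",
        // Optional explicit identity; SearchIndexerDataNoneIdentity clears it on update.
        Identity = new SearchIndexerDataNoneIdentity()
    };
    // indexerClient.CreateOrUpdateDataSourceConnection(dataSource);  // SearchIndexerClient, assumed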
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataUserAssignedIdentity)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerDataUserAssignedIdentity(document.RootElement, options); + } + + internal static SearchIndexerDataUserAssignedIdentity DeserializeSearchIndexerDataUserAssignedIdentity(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string userAssignedIdentity = default; + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("userAssignedIdentity"u8)) + { + userAssignedIdentity = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerDataUserAssignedIdentity(odataType, serializedAdditionalRawData, userAssignedIdentity); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerDataUserAssignedIdentity)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerDataUserAssignedIdentity IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataUserAssignedIdentity(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerDataUserAssignedIdentity)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SearchIndexerDataUserAssignedIdentity FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataUserAssignedIdentity(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataUserAssignedIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataUserAssignedIdentity.cs new file mode 100644 index 000000000000..6eb4884e7fa0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerDataUserAssignedIdentity.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Specifies the identity for a datasource to use. + public partial class SearchIndexerDataUserAssignedIdentity : SearchIndexerDataIdentity + { + /// Initializes a new instance of . + /// + /// The fully qualified Azure resource Id of a user assigned managed identity + /// typically in the form + /// "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId" + /// that should have been assigned to the search service. + /// + /// is null. + public SearchIndexerDataUserAssignedIdentity(string resourceId) + { + Argument.AssertNotNull(resourceId, nameof(resourceId)); + + OdataType = "#Microsoft.Azure.Search.DataUserAssignedIdentity"; + ResourceId = resourceId; + } + + /// Initializes a new instance of . + /// A URI fragment specifying the type of identity. + /// Keeps track of any properties unknown to the library. + /// + /// The fully qualified Azure resource Id of a user assigned managed identity + /// typically in the form + /// "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId" + /// that should have been assigned to the search service. + /// + internal SearchIndexerDataUserAssignedIdentity(string odataType, IDictionary serializedAdditionalRawData, string resourceId) : base(odataType, serializedAdditionalRawData) + { + ResourceId = resourceId; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerDataUserAssignedIdentity() + { + } + + /// + /// The fully qualified Azure resource Id of a user assigned managed identity + /// typically in the form + /// "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId" + /// that should have been assigned to the search service. + /// + public string ResourceId { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerError.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerError.Serialization.cs new file mode 100644 index 000000000000..ce5f91484e79 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerError.Serialization.cs @@ -0,0 +1,201 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
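For reference, the wire shape the serializer above produces for a user-assigned identity. Property names come from JsonModelWriteCore; the exact ordering and formatting of the output shown in the comment is an assumption.

    // using System.ClientModel.Primitives;
    var identity = new SearchIndexerDataUserAssignedIdentity(
        "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId");
    BinaryData wire = ModelReaderWriter.Write(identity, ModelReaderWriterOptions.Json);
    // wire ~= {"@odata.type":"#Microsoft.Azure.Search.DataUserAssignedIdentity",
    //          "userAssignedIdentity":"/subscriptions/.../userAssignedIdentities/myId"}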
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerError : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerError)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Key)) + { + writer.WritePropertyName("key"u8); + writer.WriteStringValue(Key); + } + writer.WritePropertyName("errorMessage"u8); + writer.WriteStringValue(ErrorMessage); + writer.WritePropertyName("statusCode"u8); + writer.WriteNumberValue(StatusCode); + if (Optional.IsDefined(Name)) + { + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + } + if (Optional.IsDefined(Details)) + { + writer.WritePropertyName("details"u8); + writer.WriteStringValue(Details); + } + if (Optional.IsDefined(DocumentationLink)) + { + writer.WritePropertyName("documentationLink"u8); + writer.WriteStringValue(DocumentationLink); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerError IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerError)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerError(document.RootElement, options); + } + + internal static SearchIndexerError DeserializeSearchIndexerError(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string key = default; + string errorMessage = default; + int statusCode = default; + string name = default; + string details = default; + string documentationLink = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("errorMessage"u8)) + { + errorMessage = property.Value.GetString(); + continue; + } + if (property.NameEquals("statusCode"u8)) + { + statusCode = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("details"u8)) + { + details = property.Value.GetString(); + continue; + } + if (property.NameEquals("documentationLink"u8)) + { + documentationLink = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerError( + key, + errorMessage, + statusCode, + name, + details, + documentationLink, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerError)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerError IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerError(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerError)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerError FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerError(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerError.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerError.cs new file mode 100644 index 000000000000..545263c01dc9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerError.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents an item- or document-level indexing error. + public partial class SearchIndexerError + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The message describing the error that occurred while processing the item. + /// + /// The status code indicating why the indexing operation failed. Possible values + /// include: 400 for a malformed input document, 404 for document not found, 409 + /// for a version conflict, 422 when the index is temporarily unavailable, or 503 + /// for when the service is too busy. + /// + /// is null. + internal SearchIndexerError(string errorMessage, int statusCode) + { + Argument.AssertNotNull(errorMessage, nameof(errorMessage)); + + ErrorMessage = errorMessage; + StatusCode = statusCode; + } + + /// Initializes a new instance of . + /// The key of the item for which indexing failed. + /// The message describing the error that occurred while processing the item. + /// + /// The status code indicating why the indexing operation failed. Possible values + /// include: 400 for a malformed input document, 404 for document not found, 409 + /// for a version conflict, 422 when the index is temporarily unavailable, or 503 + /// for when the service is too busy. + /// + /// + /// The name of the source at which the error originated. For example, this could + /// refer to a particular skill in the attached skillset. This may not be always + /// available. + /// + /// + /// Additional, verbose details about the error to assist in debugging the indexer. + /// This may not be always available. + /// + /// + /// A link to a troubleshooting guide for these classes of errors. This may not be + /// always available. + /// + /// Keeps track of any properties unknown to the library. 
+ internal SearchIndexerError(string key, string errorMessage, int statusCode, string name, string details, string documentationLink, IDictionary serializedAdditionalRawData) + { + Key = key; + ErrorMessage = errorMessage; + StatusCode = statusCode; + Name = name; + Details = details; + DocumentationLink = documentationLink; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerError() + { + } + + /// The key of the item for which indexing failed. + public string Key { get; } + /// The message describing the error that occurred while processing the item. + public string ErrorMessage { get; } + /// + /// The status code indicating why the indexing operation failed. Possible values + /// include: 400 for a malformed input document, 404 for document not found, 409 + /// for a version conflict, 422 when the index is temporarily unavailable, or 503 + /// for when the service is too busy. + /// + public int StatusCode { get; } + /// + /// The name of the source at which the error originated. For example, this could + /// refer to a particular skill in the attached skillset. This may not be always + /// available. + /// + public string Name { get; } + /// + /// Additional, verbose details about the error to assist in debugging the indexer. + /// This may not be always available. + /// + public string Details { get; } + /// + /// A link to a troubleshooting guide for these classes of errors. This may not be + /// always available. + /// + public string DocumentationLink { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjection.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjection.Serialization.cs new file mode 100644 index 000000000000..d952644d82ea --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjection.Serialization.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerIndexProjection : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerIndexProjection)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("selectors"u8); + writer.WriteStartArray(); + foreach (var item in Selectors) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(Parameters)) + { + writer.WritePropertyName("parameters"u8); + writer.WriteObjectValue(Parameters, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerIndexProjection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerIndexProjection)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerIndexProjection(document.RootElement, options); + } + + internal static SearchIndexerIndexProjection DeserializeSearchIndexerIndexProjection(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList selectors = default; + SearchIndexerIndexProjectionsParameters parameters = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("selectors"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchIndexerIndexProjectionSelector.DeserializeSearchIndexerIndexProjectionSelector(item, options)); + } + selectors = array; + continue; + } + if (property.NameEquals("parameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + parameters = SearchIndexerIndexProjectionsParameters.DeserializeSearchIndexerIndexProjectionsParameters(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerIndexProjection(selectors, parameters, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerIndexProjection)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerIndexProjection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerIndexProjection(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerIndexProjection)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerIndexProjection FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerIndexProjection(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjection.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjection.cs new file mode 100644 index 000000000000..d2c244f8aa82 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjection.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// Definition of additional projections to secondary search indexes. + public partial class SearchIndexerIndexProjection + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// A list of projections to be performed to secondary search indexes. + /// is null. + public SearchIndexerIndexProjection(IEnumerable selectors) + { + Argument.AssertNotNull(selectors, nameof(selectors)); + + Selectors = selectors.ToList(); + } + + /// Initializes a new instance of . + /// A list of projections to be performed to secondary search indexes. + /// + /// A dictionary of index projection-specific configuration properties. Each name + /// is the name of a specific property. Each value must be of a primitive type. + /// + /// Keeps track of any properties unknown to the library. 
+ internal SearchIndexerIndexProjection(IList selectors, SearchIndexerIndexProjectionsParameters parameters, IDictionary serializedAdditionalRawData) + { + Selectors = selectors; + Parameters = parameters; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerIndexProjection() + { + } + + /// A list of projections to be performed to secondary search indexes. + public IList Selectors { get; } + /// + /// A dictionary of index projection-specific configuration properties. Each name + /// is the name of a specific property. Each value must be of a primitive type. + /// + public SearchIndexerIndexProjectionsParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionSelector.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionSelector.Serialization.cs new file mode 100644 index 000000000000..4da698ef0dbe --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionSelector.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerIndexProjectionSelector : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionSelector)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("targetIndexName"u8); + writer.WriteStringValue(TargetIndexName); + writer.WritePropertyName("parentKeyFieldName"u8); + writer.WriteStringValue(ParentKeyFieldName); + writer.WritePropertyName("sourceContext"u8); + writer.WriteStringValue(SourceContext); + writer.WritePropertyName("mappings"u8); + writer.WriteStartArray(); + foreach (var item in Mappings) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerIndexProjectionSelector IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionSelector)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerIndexProjectionSelector(document.RootElement, options); + } + + internal static SearchIndexerIndexProjectionSelector DeserializeSearchIndexerIndexProjectionSelector(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string targetIndexName = default; + string parentKeyFieldName = default; + string sourceContext = default; + IList mappings = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("targetIndexName"u8)) + { + targetIndexName = property.Value.GetString(); + continue; + } + if (property.NameEquals("parentKeyFieldName"u8)) + { + parentKeyFieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceContext"u8)) + { + sourceContext = property.Value.GetString(); + continue; + } + if (property.NameEquals("mappings"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + mappings = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerIndexProjectionSelector(targetIndexName, parentKeyFieldName, sourceContext, mappings, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionSelector)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerIndexProjectionSelector IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerIndexProjectionSelector(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionSelector)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerIndexProjectionSelector FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerIndexProjectionSelector(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionSelector.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionSelector.cs new file mode 100644 index 000000000000..5b2e7e814d51 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionSelector.cs @@ -0,0 +1,124 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// Description for what data to store in the designated search index. + public partial class SearchIndexerIndexProjectionSelector + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Name of the search index to project to. Must have a key field with the 'keyword' analyzer set. + /// + /// Name of the field in the search index to map the parent document's key value + /// to. Must be a string field that is filterable and not the key field. + /// + /// + /// Source context for the projections. Represents the cardinality at which the + /// document will be split into multiple sub documents. + /// + /// + /// Mappings for the projection, or which source should be mapped to which field in + /// the target index. + /// + /// , , or is null. + public SearchIndexerIndexProjectionSelector(string targetIndexName, string parentKeyFieldName, string sourceContext, IEnumerable mappings) + { + Argument.AssertNotNull(targetIndexName, nameof(targetIndexName)); + Argument.AssertNotNull(parentKeyFieldName, nameof(parentKeyFieldName)); + Argument.AssertNotNull(sourceContext, nameof(sourceContext)); + Argument.AssertNotNull(mappings, nameof(mappings)); + + TargetIndexName = targetIndexName; + ParentKeyFieldName = parentKeyFieldName; + SourceContext = sourceContext; + Mappings = mappings.ToList(); + } + + /// Initializes a new instance of . + /// Name of the search index to project to. Must have a key field with the 'keyword' analyzer set. + /// + /// Name of the field in the search index to map the parent document's key value + /// to. Must be a string field that is filterable and not the key field. + /// + /// + /// Source context for the projections. Represents the cardinality at which the + /// document will be split into multiple sub documents. + /// + /// + /// Mappings for the projection, or which source should be mapped to which field in + /// the target index. + /// + /// Keeps track of any properties unknown to the library. 
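A minimal construction sketch for the selector described above (not part of the generated diff); the index name, field names, and source context are hypothetical, and InputFieldMappingEntry is assumed to take a name in its constructor and expose a settable Source property, as it is used elsewhere in this library:

// Hypothetical names: "chunk-index", "parent_id", "chunk", "title".
var selector = new SearchIndexerIndexProjectionSelector(
    targetIndexName: "chunk-index",          // secondary index whose key field uses the 'keyword' analyzer
    parentKeyFieldName: "parent_id",         // filterable string field, not the key field
    sourceContext: "/document/pages/*",      // one projected document per page
    mappings: new[]
    {
        new InputFieldMappingEntry("chunk") { Source = "/document/pages/*" },
        new InputFieldMappingEntry("title") { Source = "/document/title" },
    });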
+ internal SearchIndexerIndexProjectionSelector(string targetIndexName, string parentKeyFieldName, string sourceContext, IList mappings, IDictionary serializedAdditionalRawData) + { + TargetIndexName = targetIndexName; + ParentKeyFieldName = parentKeyFieldName; + SourceContext = sourceContext; + Mappings = mappings; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerIndexProjectionSelector() + { + } + + /// Name of the search index to project to. Must have a key field with the 'keyword' analyzer set. + public string TargetIndexName { get; set; } + /// + /// Name of the field in the search index to map the parent document's key value + /// to. Must be a string field that is filterable and not the key field. + /// + public string ParentKeyFieldName { get; set; } + /// + /// Source context for the projections. Represents the cardinality at which the + /// document will be split into multiple sub documents. + /// + public string SourceContext { get; set; } + /// + /// Mappings for the projection, or which source should be mapped to which field in + /// the target index. + /// + public IList Mappings { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionsParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionsParameters.Serialization.cs new file mode 100644 index 000000000000..bf3bd4d9cc87 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionsParameters.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerIndexProjectionsParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionsParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(ProjectionMode)) + { + writer.WritePropertyName("projectionMode"u8); + writer.WriteStringValue(ProjectionMode.Value.ToString()); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + SearchIndexerIndexProjectionsParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionsParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerIndexProjectionsParameters(document.RootElement, options); + } + + internal static SearchIndexerIndexProjectionsParameters DeserializeSearchIndexerIndexProjectionsParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IndexProjectionMode? projectionMode = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("projectionMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + projectionMode = new IndexProjectionMode(property.Value.GetString()); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SearchIndexerIndexProjectionsParameters(projectionMode, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionsParameters)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerIndexProjectionsParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerIndexProjectionsParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerIndexProjectionsParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerIndexProjectionsParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerIndexProjectionsParameters(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionsParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionsParameters.cs new file mode 100644 index 000000000000..ea93dfd8d43b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerIndexProjectionsParameters.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A dictionary of index projection-specific configuration properties. Each name + /// is the name of a specific property. Each value must be of a primitive type. + /// + public partial class SearchIndexerIndexProjectionsParameters + { + /// Initializes a new instance of . + public SearchIndexerIndexProjectionsParameters() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// + /// Defines behavior of the index projections in relation to the rest of the + /// indexer. + /// + /// Additional Properties. + internal SearchIndexerIndexProjectionsParameters(IndexProjectionMode? projectionMode, IDictionary additionalProperties) + { + ProjectionMode = projectionMode; + AdditionalProperties = additionalProperties; + } + + /// + /// Defines behavior of the index projections in relation to the rest of the + /// indexer. + /// + public IndexProjectionMode? ProjectionMode { get; set; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerKnowledgeStoreParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerKnowledgeStoreParameters.Serialization.cs new file mode 100644 index 000000000000..5a3d69097289 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerKnowledgeStoreParameters.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
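Continuing the selector sketch above, the selector is wrapped in a SearchIndexerIndexProjection and tuned through the parameter bag defined in this diff; SkipIndexingParentDocuments is assumed to be one of the IndexProjectionMode values exposed by this library:

var indexProjection = new SearchIndexerIndexProjection(new[] { selector })
{
    Parameters = new SearchIndexerIndexProjectionsParameters
    {
        // Index only the projected child documents; skip the parent documents.
        ProjectionMode = IndexProjectionMode.SkipIndexingParentDocuments,
    },
};
// The projection would then typically be attached to a skillset,
// e.g. skillset.IndexProjection = indexProjection.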
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerKnowledgeStoreParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerKnowledgeStoreParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(SynthesizeGeneratedKeyName)) + { + writer.WritePropertyName("synthesizeGeneratedKeyName"u8); + writer.WriteBooleanValue(SynthesizeGeneratedKeyName.Value); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + SearchIndexerKnowledgeStoreParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerKnowledgeStoreParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerKnowledgeStoreParameters(document.RootElement, options); + } + + internal static SearchIndexerKnowledgeStoreParameters DeserializeSearchIndexerKnowledgeStoreParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool? synthesizeGeneratedKeyName = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("synthesizeGeneratedKeyName"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + synthesizeGeneratedKeyName = property.Value.GetBoolean(); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SearchIndexerKnowledgeStoreParameters(synthesizeGeneratedKeyName, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerKnowledgeStoreParameters)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerKnowledgeStoreParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerKnowledgeStoreParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerKnowledgeStoreParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerKnowledgeStoreParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerKnowledgeStoreParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerKnowledgeStoreParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerKnowledgeStoreParameters.cs new file mode 100644 index 000000000000..9ac86ce6fa25 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerKnowledgeStoreParameters.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A dictionary of knowledge store-specific configuration properties. Each name is + /// the name of a specific property. Each value must be of a primitive type. + /// + public partial class SearchIndexerKnowledgeStoreParameters + { + /// Initializes a new instance of . + public SearchIndexerKnowledgeStoreParameters() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// + /// Whether or not projections should synthesize a generated key name if one isn't + /// already present. + /// + /// Additional Properties. + internal SearchIndexerKnowledgeStoreParameters(bool? synthesizeGeneratedKeyName, IDictionary additionalProperties) + { + SynthesizeGeneratedKeyName = synthesizeGeneratedKeyName; + AdditionalProperties = additionalProperties; + } + + /// + /// Whether or not projections should synthesize a generated key name if one isn't + /// already present. + /// + public bool? SynthesizeGeneratedKeyName { get; set; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
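A short usage sketch for the knowledge-store parameter bag defined in this file (not part of the generated diff); the "customSetting" key is hypothetical and only illustrates passing an unmodeled property through AdditionalProperties:

var knowledgeStoreParameters = new SearchIndexerKnowledgeStoreParameters
{
    SynthesizeGeneratedKeyName = true,
};
// Settings the library does not model explicitly can be passed through the additional-properties bag.
knowledgeStoreParameters.AdditionalProperties["customSetting"] = BinaryData.FromObjectAsJson(true); // hypothetical key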
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerLimits.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerLimits.Serialization.cs new file mode 100644 index 000000000000..e05e18c02a9a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerLimits.Serialization.cs @@ -0,0 +1,179 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerLimits : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerLimits)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(MaxRunTime)) + { + writer.WritePropertyName("maxRunTime"u8); + writer.WriteStringValue(MaxRunTime.Value, "P"); + } + if (Optional.IsDefined(MaxDocumentExtractionSize)) + { + writer.WritePropertyName("maxDocumentExtractionSize"u8); + writer.WriteNumberValue(MaxDocumentExtractionSize.Value); + } + if (Optional.IsDefined(MaxDocumentContentCharactersToExtract)) + { + writer.WritePropertyName("maxDocumentContentCharactersToExtract"u8); + writer.WriteNumberValue(MaxDocumentContentCharactersToExtract.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerLimits IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerLimits)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerLimits(document.RootElement, options); + } + + internal static SearchIndexerLimits DeserializeSearchIndexerLimits(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TimeSpan? maxRunTime = default; + long? maxDocumentExtractionSize = default; + long? maxDocumentContentCharactersToExtract = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxRunTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxRunTime = property.Value.GetTimeSpan("P"); + continue; + } + if (property.NameEquals("maxDocumentExtractionSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxDocumentExtractionSize = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("maxDocumentContentCharactersToExtract"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxDocumentContentCharactersToExtract = property.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerLimits(maxRunTime, maxDocumentExtractionSize, maxDocumentContentCharactersToExtract, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerLimits)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerLimits IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerLimits(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerLimits)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerLimits FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerLimits(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerLimits.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerLimits.cs new file mode 100644 index 000000000000..f0cbaaac8b69 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerLimits.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents the limits that can be applied to an indexer. + public partial class SearchIndexerLimits + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal SearchIndexerLimits() + { + } + + /// Initializes a new instance of . + /// The maximum duration that the indexer is permitted to run for one execution. + /// + /// The maximum size of a document, in bytes, which will be considered valid for + /// indexing. + /// + /// + /// The maximum number of characters that will be extracted from a document picked + /// up for indexing. + /// + /// Keeps track of any properties unknown to the library. + internal SearchIndexerLimits(TimeSpan? maxRunTime, long? maxDocumentExtractionSize, long? maxDocumentContentCharactersToExtract, IDictionary serializedAdditionalRawData) + { + MaxRunTime = maxRunTime; + MaxDocumentExtractionSize = maxDocumentExtractionSize; + MaxDocumentContentCharactersToExtract = maxDocumentContentCharactersToExtract; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The maximum duration that the indexer is permitted to run for one execution. + public TimeSpan? MaxRunTime { get; } + /// + /// The maximum size of a document, in bytes, which will be considered valid for + /// indexing. + /// + public long? MaxDocumentExtractionSize { get; } + /// + /// The maximum number of characters that will be extracted from a document picked + /// up for indexing. + /// + public long? MaxDocumentContentCharactersToExtract { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkill.Serialization.cs new file mode 100644 index 000000000000..253bd924767c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkill.Serialization.cs @@ -0,0 +1,184 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
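SearchIndexerLimits is read-only and produced only by deserialization (it typically arrives as part of an indexer status response), so a consumer simply inspects it. A minimal sketch, assuming using System is in scope; the null checks reflect that every limit is optional:

internal static void PrintLimits(SearchIndexerLimits limits)
{
    if (limits.MaxRunTime is TimeSpan maxRunTime)
    {
        Console.WriteLine($"Max run time per execution: {maxRunTime}");
    }
    if (limits.MaxDocumentExtractionSize is long maxBytes)
    {
        Console.WriteLine($"Max document size considered for indexing: {maxBytes} bytes");
    }
    if (limits.MaxDocumentContentCharactersToExtract is long maxChars)
    {
        Console.WriteLine($"Max characters extracted per document: {maxChars}");
    }
}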
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownSearchIndexerSkill))] + public partial class SearchIndexerSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + if (Optional.IsDefined(Name)) + { + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsDefined(Context)) + { + writer.WritePropertyName("context"u8); + writer.WriteStringValue(Context); + } + writer.WritePropertyName("inputs"u8); + writer.WriteStartArray(); + foreach (var item in Inputs) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + writer.WritePropertyName("outputs"u8); + writer.WriteStartArray(); + foreach (var item in Outputs) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerSkill(document.RootElement, options); + } + + internal static SearchIndexerSkill DeserializeSearchIndexerSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Skills.Custom.AmlSkill": return AzureMachineLearningSkill.DeserializeAzureMachineLearningSkill(element, options); + case "#Microsoft.Skills.Custom.WebApiSkill": return WebApiSkill.DeserializeWebApiSkill(element, options); + case "#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill": return AzureOpenAIEmbeddingSkill.DeserializeAzureOpenAIEmbeddingSkill(element, options); + case "#Microsoft.Skills.Text.CustomEntityLookupSkill": return CustomEntityLookupSkill.DeserializeCustomEntityLookupSkill(element, options); + case "#Microsoft.Skills.Text.EntityRecognitionSkill": return EntityRecognitionSkill.DeserializeEntityRecognitionSkill(element, options); + case "#Microsoft.Skills.Text.KeyPhraseExtractionSkill": return KeyPhraseExtractionSkill.DeserializeKeyPhraseExtractionSkill(element, options); + case "#Microsoft.Skills.Text.LanguageDetectionSkill": return LanguageDetectionSkill.DeserializeLanguageDetectionSkill(element, options); + case "#Microsoft.Skills.Text.MergeSkill": return MergeSkill.DeserializeMergeSkill(element, options); + case "#Microsoft.Skills.Text.PIIDetectionSkill": return Search.Documents.Indexes.Models.PiiDetectionSkill.DeserializePiiDetectionSkill(element, options); + case "#Microsoft.Skills.Text.SentimentSkill": return SentimentSkill.DeserializeSentimentSkill(element, options); + case "#Microsoft.Skills.Text.SplitSkill": return SplitSkill.DeserializeSplitSkill(element, options); + case "#Microsoft.Skills.Text.TranslationSkill": return TextTranslationSkill.DeserializeTextTranslationSkill(element, options); + case "#Microsoft.Skills.Text.V3.EntityLinkingSkill": return EntityLinkingSkill.DeserializeEntityLinkingSkill(element, options); + case "#Microsoft.Skills.Text.V3.EntityRecognitionSkill": return Search.Documents.Indexes.Models.EntityRecognitionSkillV3.DeserializeEntityRecognitionSkillV3(element, options); + case "#Microsoft.Skills.Text.V3.SentimentSkill": return Search.Documents.Indexes.Models.SentimentSkillV3.DeserializeSentimentSkillV3(element, options); + case "#Microsoft.Skills.Util.ConditionalSkill": return ConditionalSkill.DeserializeConditionalSkill(element, options); + case "#Microsoft.Skills.Util.DocumentExtractionSkill": return DocumentExtractionSkill.DeserializeDocumentExtractionSkill(element, options); + case "#Microsoft.Skills.Util.DocumentIntelligenceLayoutSkill": return DocumentIntelligenceLayoutSkill.DeserializeDocumentIntelligenceLayoutSkill(element, options); + case "#Microsoft.Skills.Util.ShaperSkill": return ShaperSkill.DeserializeShaperSkill(element, options); + case "#Microsoft.Skills.Vision.ImageAnalysisSkill": return ImageAnalysisSkill.DeserializeImageAnalysisSkill(element, options); + case "#Microsoft.Skills.Vision.OcrSkill": return OcrSkill.DeserializeOcrSkill(element, options); + case 
"#Microsoft.Skills.Vision.VectorizeSkill": return VisionVectorizeSkill.DeserializeVisionVectorizeSkill(element, options); + } + } + return UnknownSearchIndexerSkill.DeserializeUnknownSearchIndexerSkill(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerSkill(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkill.cs new file mode 100644 index 000000000000..0316899ab158 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkill.cs @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// Base type for skills. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , and . + /// + public abstract partial class SearchIndexerSkill + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + protected SearchIndexerSkill(IEnumerable inputs, IEnumerable outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + Inputs = inputs.ToList(); + Outputs = outputs.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + internal SearchIndexerSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + Name = name; + Description = description; + Context = context; + Inputs = inputs; + Outputs = outputs; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerSkill() + { + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + public string Name { get; set; } + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + public string Description { get; set; } + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + public string Context { get; set; } + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + public IList Inputs { get; } + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. 
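Because SearchIndexerSkill is an abstract base resolved through the @odata.type discriminator (see the deserialization switch earlier in this diff), callers usually pattern-match to the concrete skill they care about. A minimal sketch (not part of the generated diff) using only members declared on the base class and type names that appear in that switch:

internal static void DescribeSkill(SearchIndexerSkill skill)
{
    Console.WriteLine($"{skill.Name ?? "<unnamed>"} at context '{skill.Context ?? "/document"}' " +
                      $"({skill.Inputs.Count} inputs, {skill.Outputs.Count} outputs)");

    switch (skill)
    {
        case AzureOpenAIEmbeddingSkill:
            Console.WriteLine("  Embedding skill targeting an Azure OpenAI deployment.");
            break;
        case OcrSkill:
            Console.WriteLine("  OCR skill extracting text from images.");
            break;
        default:
            Console.WriteLine($"  Other skill type: {skill.GetType().Name}");
            break;
    }
}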
+ /// + public IList Outputs { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkillset.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkillset.Serialization.cs new file mode 100644 index 000000000000..e1e61c2255c6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkillset.Serialization.cs @@ -0,0 +1,252 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerSkillset : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerSkillset)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + writer.WritePropertyName("skills"u8); + writer.WriteStartArray(); + foreach (var item in Skills) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(CognitiveServicesAccount)) + { + writer.WritePropertyName("cognitiveServices"u8); + writer.WriteObjectValue(CognitiveServicesAccount, options); + } + if (Optional.IsDefined(KnowledgeStore)) + { + writer.WritePropertyName("knowledgeStore"u8); + writer.WriteObjectValue(KnowledgeStore, options); + } + if (Optional.IsDefined(IndexProjection)) + { + writer.WritePropertyName("indexProjections"u8); + writer.WriteObjectValue(IndexProjection, options); + } + if (Optional.IsDefined(ETag)) + { + writer.WritePropertyName("@odata.etag"u8); + writer.WriteStringValue(ETag); + } + if (Optional.IsDefined(EncryptionKey)) + { + writer.WritePropertyName("encryptionKey"u8); + writer.WriteObjectValue(EncryptionKey, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerSkillset IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerSkillset)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerSkillset(document.RootElement, options); + } + + internal static SearchIndexerSkillset DeserializeSearchIndexerSkillset(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string description = default; + IList skills = default; + CognitiveServicesAccount cognitiveServices = default; + Search.Documents.Indexes.Models.KnowledgeStore knowledgeStore = default; + SearchIndexerIndexProjection indexProjections = default; + string odataEtag = default; + SearchResourceEncryptionKey encryptionKey = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("skills"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SearchIndexerSkill.DeserializeSearchIndexerSkill(item, options)); + } + skills = array; + continue; + } + if (property.NameEquals("cognitiveServices"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + cognitiveServices = CognitiveServicesAccount.DeserializeCognitiveServicesAccount(property.Value, options); + continue; + } + if (property.NameEquals("knowledgeStore"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + knowledgeStore = Search.Documents.Indexes.Models.KnowledgeStore.DeserializeKnowledgeStore(property.Value, options); + continue; + } + if (property.NameEquals("indexProjections"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + indexProjections = SearchIndexerIndexProjection.DeserializeSearchIndexerIndexProjection(property.Value, options); + continue; + } + if (property.NameEquals("@odata.etag"u8)) + { + odataEtag = property.Value.GetString(); + continue; + } + if (property.NameEquals("encryptionKey"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerSkillset( + name, + description, + skills, + cognitiveServices, + knowledgeStore, + indexProjections, + odataEtag, + encryptionKey, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerSkillset)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerSkillset IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerSkillset(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerSkillset)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerSkillset FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerSkillset(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkillset.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkillset.cs new file mode 100644 index 000000000000..05fb3b1802b0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerSkillset.cs @@ -0,0 +1,156 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// A list of skills. + public partial class SearchIndexerSkillset + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the skillset. + /// + /// A list of skills in the skillset. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , and . + /// + /// or is null. 
+ public SearchIndexerSkillset(string name, IEnumerable skills) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(skills, nameof(skills)); + + Name = name; + Skills = skills.ToList(); + } + + /// Initializes a new instance of . + /// The name of the skillset. + /// The description of the skillset. + /// + /// A list of skills in the skillset. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , and . + /// + /// + /// Details about the Azure AI service to be used when running skills. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + /// + /// Definition of additional projections to Azure blob, table, or files, of + /// enriched data. + /// + /// Definition of additional projections to secondary search index(es). + /// The ETag of the skillset. + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your skillset + /// definition when you want full assurance that no one, not even Microsoft, can + /// decrypt your skillset definition. Once you have encrypted your skillset + /// definition, it will always remain encrypted. The search service will ignore + /// attempts to set this property to null. You can change this property as needed + /// if you want to rotate your encryption key; Your skillset definition will be + /// unaffected. Encryption with customer-managed keys is not available for free + /// search services, and is only available for paid services created on or after + /// January 1, 2019. + /// + /// Keeps track of any properties unknown to the library. + internal SearchIndexerSkillset(string name, string description, IList skills, CognitiveServicesAccount cognitiveServicesAccount, Search.Documents.Indexes.Models.KnowledgeStore knowledgeStore, SearchIndexerIndexProjection indexProjection, string eTag, SearchResourceEncryptionKey encryptionKey, IDictionary serializedAdditionalRawData) + { + Name = name; + Description = description; + Skills = skills; + CognitiveServicesAccount = cognitiveServicesAccount; + KnowledgeStore = knowledgeStore; + IndexProjection = indexProjection; + ETag = eTag; + EncryptionKey = encryptionKey; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerSkillset() + { + } + + /// The name of the skillset. + public string Name { get; set; } + /// The description of the skillset. + public string Description { get; set; } + /// + /// A list of skills in the skillset. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , and . + /// + public IList Skills { get; } + /// + /// Details about the Azure AI service to be used when running skills. + /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public CognitiveServicesAccount CognitiveServicesAccount { get; set; } + /// + /// Definition of additional projections to Azure blob, table, or files, of + /// enriched data. + /// + public Search.Documents.Indexes.Models.KnowledgeStore KnowledgeStore { get; set; } + /// Definition of additional projections to secondary search index(es). + public SearchIndexerIndexProjection IndexProjection { get; set; } + /// The ETag of the skillset. + public string ETag { get; set; } + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your skillset + /// definition when you want full assurance that no one, not even Microsoft, can + /// decrypt your skillset definition. Once you have encrypted your skillset + /// definition, it will always remain encrypted. The search service will ignore + /// attempts to set this property to null. You can change this property as needed + /// if you want to rotate your encryption key; Your skillset definition will be + /// unaffected. Encryption with customer-managed keys is not available for free + /// search services, and is only available for paid services created on or after + /// January 1, 2019. + /// + public SearchResourceEncryptionKey EncryptionKey { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerStatus.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerStatus.Serialization.cs new file mode 100644 index 000000000000..c9a284659cd1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerStatus.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerStatus : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerStatus)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + if (Optional.IsDefined(LastResult)) + { + writer.WritePropertyName("lastResult"u8); + writer.WriteObjectValue(LastResult, options); + } + writer.WritePropertyName("executionHistory"u8); + writer.WriteStartArray(); + foreach (var item in ExecutionHistory) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + writer.WritePropertyName("limits"u8); + writer.WriteObjectValue(Limits, options); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerStatus IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerStatus)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerStatus(document.RootElement, options); + } + + internal static SearchIndexerStatus DeserializeSearchIndexerStatus(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IndexerStatus status = default; + IndexerExecutionResult lastResult = default; + IReadOnlyList executionHistory = default; + SearchIndexerLimits limits = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("status"u8)) + { + status = new IndexerStatus(property.Value.GetString()); + continue; + } + if (property.NameEquals("lastResult"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastResult = IndexerExecutionResult.DeserializeIndexerExecutionResult(property.Value, options); + continue; + } + if (property.NameEquals("executionHistory"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(IndexerExecutionResult.DeserializeIndexerExecutionResult(item, options)); + } + executionHistory = array; + continue; + } + if (property.NameEquals("limits"u8)) + { + limits = SearchIndexerLimits.DeserializeSearchIndexerLimits(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerStatus(status, lastResult, executionHistory, limits, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerStatus)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerStatus IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerStatus(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerStatus)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerStatus FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerStatus(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerStatus.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerStatus.cs new file mode 100644 index 000000000000..0a737db70fc2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerStatus.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// Represents the current status and execution history of an indexer. + public partial class SearchIndexerStatus + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Overall indexer status. + /// History of the recent indexer executions, sorted in reverse chronological order. + /// The execution limits for the indexer. + /// or is null. 
+ internal SearchIndexerStatus(IndexerStatus status, IEnumerable executionHistory, SearchIndexerLimits limits) + { + Argument.AssertNotNull(executionHistory, nameof(executionHistory)); + Argument.AssertNotNull(limits, nameof(limits)); + + Status = status; + ExecutionHistory = executionHistory.ToList(); + Limits = limits; + } + + /// Initializes a new instance of . + /// Overall indexer status. + /// The result of the most recent or an in-progress indexer execution. + /// History of the recent indexer executions, sorted in reverse chronological order. + /// The execution limits for the indexer. + /// Keeps track of any properties unknown to the library. + internal SearchIndexerStatus(IndexerStatus status, IndexerExecutionResult lastResult, IReadOnlyList executionHistory, SearchIndexerLimits limits, IDictionary serializedAdditionalRawData) + { + Status = status; + LastResult = lastResult; + ExecutionHistory = executionHistory; + Limits = limits; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerStatus() + { + } + + /// Overall indexer status. + public IndexerStatus Status { get; } + /// The result of the most recent or an in-progress indexer execution. + public IndexerExecutionResult LastResult { get; } + /// History of the recent indexer executions, sorted in reverse chronological order. + public IReadOnlyList ExecutionHistory { get; } + /// The execution limits for the indexer. + public SearchIndexerLimits Limits { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerWarning.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerWarning.Serialization.cs new file mode 100644 index 000000000000..c1670e965f83 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerWarning.Serialization.cs @@ -0,0 +1,192 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchIndexerWarning : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerWarning)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Key)) + { + writer.WritePropertyName("key"u8); + writer.WriteStringValue(Key); + } + writer.WritePropertyName("message"u8); + writer.WriteStringValue(Message); + if (Optional.IsDefined(Name)) + { + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + } + if (Optional.IsDefined(Details)) + { + writer.WritePropertyName("details"u8); + writer.WriteStringValue(Details); + } + if (Optional.IsDefined(DocumentationLink)) + { + writer.WritePropertyName("documentationLink"u8); + writer.WriteStringValue(DocumentationLink); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchIndexerWarning IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerWarning)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerWarning(document.RootElement, options); + } + + internal static SearchIndexerWarning DeserializeSearchIndexerWarning(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string key = default; + string message = default; + string name = default; + string details = default; + string documentationLink = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("key"u8)) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("message"u8)) + { + message = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("details"u8)) + { + details = property.Value.GetString(); + continue; + } + if (property.NameEquals("documentationLink"u8)) + { + documentationLink = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchIndexerWarning( + key, + message, + name, + details, + documentationLink, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerWarning)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerWarning IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerWarning(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerWarning)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchIndexerWarning FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerWarning(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerWarning.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerWarning.cs new file mode 100644 index 000000000000..70edfea550cc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchIndexerWarning.cs @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents an item-level warning. + public partial class SearchIndexerWarning + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The message describing the warning that occurred while processing the item. + /// is null. + internal SearchIndexerWarning(string message) + { + Argument.AssertNotNull(message, nameof(message)); + + Message = message; + } + + /// Initializes a new instance of . + /// The key of the item which generated a warning. + /// The message describing the warning that occurred while processing the item. + /// + /// The name of the source at which the warning originated. For example, this could + /// refer to a particular skill in the attached skillset. 
This may not be always + /// available. + /// + /// + /// Additional, verbose details about the warning to assist in debugging the + /// indexer. This may not be always available. + /// + /// + /// A link to a troubleshooting guide for these classes of warnings. This may not + /// be always available. + /// + /// Keeps track of any properties unknown to the library. + internal SearchIndexerWarning(string key, string message, string name, string details, string documentationLink, IDictionary serializedAdditionalRawData) + { + Key = key; + Message = message; + Name = name; + Details = details; + DocumentationLink = documentationLink; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchIndexerWarning() + { + } + + /// The key of the item which generated a warning. + public string Key { get; } + /// The message describing the warning that occurred while processing the item. + public string Message { get; } + /// + /// The name of the source at which the warning originated. For example, this could + /// refer to a particular skill in the attached skillset. This may not be always + /// available. + /// + public string Name { get; } + /// + /// Additional, verbose details about the warning to assist in debugging the + /// indexer. This may not be always available. + /// + public string Details { get; } + /// + /// A link to a troubleshooting guide for these classes of warnings. This may not + /// be always available. + /// + public string DocumentationLink { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchMode.cs new file mode 100644 index 000000000000..67b966a46b57 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchMode.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// Specifies whether any or all of the search terms must be matched in order to + /// count the document as a match. + /// + public readonly partial struct SearchMode : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public SearchMode(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string AnyValue = "any"; + private const string AllValue = "all"; + + /// + /// Any of the search terms must be matched in order to count the document as a + /// match. + /// + public static SearchMode Any { get; } = new SearchMode(AnyValue); + /// + /// All of the search terms must be matched in order to count the document as a + /// match. + /// + public static SearchMode All { get; } = new SearchMode(AllValue); + /// Determines if two values are the same. + public static bool operator ==(SearchMode left, SearchMode right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(SearchMode left, SearchMode right) => !left.Equals(right); + /// Converts a to a . 
+ public static implicit operator SearchMode(string value) => new SearchMode(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is SearchMode other && Equals(other); + /// + public bool Equals(SearchMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchModelFactory.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchModelFactory.cs index 6b392475902e..bd1448d26563 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/SearchModelFactory.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchModelFactory.cs @@ -15,179 +15,407 @@ namespace Azure.Search.Documents.Models /// Model factory for models. public static partial class SearchModelFactory { - /// Initializes a new instance of . - /// The approximate count of documents falling within the bucket described by this facet. - /// The nested facet query results for the search operation, organized as a collection of buckets for each faceted field; null if the query did not contain any nested facets. - /// Additional Properties. - /// A new instance for mocking. - public static FacetResult FacetResult(long? count = null, IReadOnlyDictionary> facets = null, IReadOnlyDictionary additionalProperties = null) + /// Initializes a new instance of . + /// Total number of aliases. + /// Total number of documents across all indexes in the service. + /// Total number of indexes. + /// Total number of indexers. + /// Total number of data sources. + /// Total size of used storage in bytes. + /// Total number of synonym maps. + /// Total number of skillsets. + /// Total memory consumption of all vector indexes within the service, in bytes. + /// A new instance for mocking. 
+ public static SearchServiceCounters SearchServiceCounters(Search.Documents.Indexes.Models.SearchResourceCounter aliasCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter documentCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter indexCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter indexerCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter dataSourceCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter storageSizeCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter synonymMapCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter skillsetCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null, Search.Documents.Indexes.Models.SearchResourceCounter vectorIndexSizeCounter = (Search.Documents.Indexes.Models.SearchResourceCounter)null) { - facets ??= new Dictionary>(); - additionalProperties ??= new Dictionary(); + return new SearchServiceCounters( + aliasCounter, + documentCounter, + indexCounter, + indexerCounter, + dataSourceCounter, + storageSizeCounter, + synonymMapCounter, + skillsetCounter, + vectorIndexSizeCounter, + serializedAdditionalRawData: null); + } - return new FacetResult(count, facets, additionalProperties); + /// Initializes a new instance of . + /// The maximum allowed fields per index. + /// + /// The maximum depth which you can nest sub-fields in an index, including the + /// top-level complex field. For example, a/b/c has a nesting depth of 3. + /// + /// + /// The maximum number of fields of type Collection(Edm.ComplexType) allowed in an + /// index. + /// + /// The maximum number of objects in complex collections allowed per document. + /// The maximum amount of storage in bytes allowed per index. + /// A new instance for mocking. + public static SearchServiceLimits SearchServiceLimits(int? maxFieldsPerIndex = null, int? maxFieldNestingDepthPerIndex = null, int? maxComplexCollectionFieldsPerIndex = null, int? maxComplexObjectsInCollectionsPerDocument = null, long? maxStoragePerIndexInBytes = null) + { + return new SearchServiceLimits( + maxFieldsPerIndex, + maxFieldNestingDepthPerIndex, + maxComplexCollectionFieldsPerIndex, + maxComplexObjectsInCollectionsPerDocument, + maxStoragePerIndexInBytes, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The name of the index. + /// The number of documents in the index. + /// The amount of storage in bytes consumed by the index. + /// The amount of memory in bytes consumed by vectors in the index. + /// A new instance for mocking. + public static IndexStatisticsSummary IndexStatisticsSummary(string name = null, long documentCount = default, long storageSize = default, long? vectorIndexSize = null) + { + return new IndexStatisticsSummary(name, documentCount, storageSize, vectorIndexSize, serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The score value represents how relevant the answer is to the query relative to other answers returned for the query. + /// Initializes a new instance of . 
+ /// + /// The score value represents how relevant the answer is to the query relative to + /// other answers returned for the query. + /// /// The key of the document the answer was extracted from. /// The text passage extracted from the document contents as the answer. - /// Same text passage as in the Text property with highlighted text phrases most relevant to the query. + /// + /// Same text passage as in the Text property with highlighted text phrases most + /// relevant to the query. + /// /// Additional Properties. - /// A new instance for mocking. - public static QueryAnswerResult QueryAnswerResult(double? score = null, string key = null, string text = null, string highlights = null, IReadOnlyDictionary additionalProperties = null) + /// A new instance for mocking. + public static QueryAnswerResult QueryAnswerResult(double? score = null, string key = null, string text = null, string highlights = null, IReadOnlyDictionary additionalProperties = null) { - additionalProperties ??= new Dictionary(); + additionalProperties ??= new Dictionary(); return new QueryAnswerResult(score, key, text, highlights, additionalProperties); } - /// Initializes a new instance of . - /// A representative text passage extracted from the document most relevant to the search query. - /// Same text passage as in the Text property with highlighted phrases most relevant to the query. + /// Initializes a new instance of . + /// Contains debugging information specific to query rewrites. + /// A new instance for mocking. + public static DebugInfo DebugInfo(QueryRewritesDebugInfo queryRewrites = null) + { + return new DebugInfo(queryRewrites, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// List of query rewrites generated for the text query. + /// List of query rewrites generated for the vectorizable text queries. + /// A new instance for mocking. + public static QueryRewritesDebugInfo QueryRewritesDebugInfo(QueryRewritesValuesDebugInfo text = null, IEnumerable vectors = null) + { + vectors ??= new List(); + + return new QueryRewritesDebugInfo(text, vectors?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// + /// The input text to the generative query rewriting model. There may be cases + /// where the user query and the input to the generative model are not identical. + /// + /// List of query rewrites. + /// A new instance for mocking. + public static QueryRewritesValuesDebugInfo QueryRewritesValuesDebugInfo(string inputQuery = null, IEnumerable rewrites = null) + { + rewrites ??= new List(); + + return new QueryRewritesValuesDebugInfo(inputQuery, rewrites?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// + /// A representative text passage extracted from the document most relevant to the + /// search query. + /// + /// + /// Same text passage as in the Text property with highlighted phrases most + /// relevant to the query. + /// /// Additional Properties. - /// A new instance for mocking. - public static QueryCaptionResult QueryCaptionResult(string text = null, string highlights = null, IReadOnlyDictionary additionalProperties = null) + /// A new instance for mocking. 
+ public static QueryCaptionResult QueryCaptionResult(string text = null, string highlights = null, IReadOnlyDictionary additionalProperties = null) { - additionalProperties ??= new Dictionary(); + additionalProperties ??= new Dictionary(); return new QueryCaptionResult(text, highlights, additionalProperties); } - /// Initializes a new instance of . + /// Initializes a new instance of . /// Contains debugging information specific to semantic ranking requests. /// Contains debugging information specific to vector and hybrid search. - /// A new instance for mocking. + /// A new instance for mocking. public static DocumentDebugInfo DocumentDebugInfo(SemanticDebugInfo semantic = null, VectorsDebugInfo vectors = null) { - return new DocumentDebugInfo(semantic, vectors); + return new DocumentDebugInfo(semantic, vectors, serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The title field that was sent to the semantic enrichment process, as well as how it was used. - /// The content fields that were sent to the semantic enrichment process, as well as how they were used. - /// The keyword fields that were sent to the semantic enrichment process, as well as how they were used. + /// Initializes a new instance of . + /// + /// The title field that was sent to the semantic enrichment process, as well as + /// how it was used + /// + /// + /// The content fields that were sent to the semantic enrichment process, as well + /// as how they were used + /// + /// + /// The keyword fields that were sent to the semantic enrichment process, as well + /// as how they were used + /// /// The raw concatenated strings that were sent to the semantic enrichment process. - /// A new instance for mocking. + /// A new instance for mocking. public static SemanticDebugInfo SemanticDebugInfo(QueryResultDocumentSemanticField titleField = null, IEnumerable contentFields = null, IEnumerable keywordFields = null, QueryResultDocumentRerankerInput rerankerInput = null) { contentFields ??= new List(); keywordFields ??= new List(); - return new SemanticDebugInfo(titleField, contentFields?.ToList(), keywordFields?.ToList(), rerankerInput); + return new SemanticDebugInfo(titleField, contentFields?.ToList(), keywordFields?.ToList(), rerankerInput, serializedAdditionalRawData: null); } - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the field that was sent to the semantic enrichment process. - /// The way the field was used for the semantic enrichment process (fully used, partially used, or unused). - /// A new instance for mocking. + /// + /// The way the field was used for the semantic enrichment process (fully used, + /// partially used, or unused) + /// + /// A new instance for mocking. public static QueryResultDocumentSemanticField QueryResultDocumentSemanticField(string name = null, SemanticFieldState? state = null) { - return new QueryResultDocumentSemanticField(name, state); + return new QueryResultDocumentSemanticField(name, state, serializedAdditionalRawData: null); } - /// Initializes a new instance of . + /// Initializes a new instance of . /// The raw string for the title field that was used for semantic enrichment. - /// The raw concatenated strings for the content fields that were used for semantic enrichment. - /// The raw concatenated strings for the keyword fields that were used for semantic enrichment. - /// A new instance for mocking. 
+ /// + /// The raw concatenated strings for the content fields that were used for semantic + /// enrichment. + /// + /// + /// The raw concatenated strings for the keyword fields that were used for semantic + /// enrichment. + /// + /// A new instance for mocking. public static QueryResultDocumentRerankerInput QueryResultDocumentRerankerInput(string title = null, string content = null, string keywords = null) { - return new QueryResultDocumentRerankerInput(title, content, keywords); + return new QueryResultDocumentRerankerInput(title, content, keywords, serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The breakdown of subscores of the document prior to the chosen result set fusion/combination method such as RRF. - /// A new instance for mocking. + /// Initializes a new instance of . + /// + /// The breakdown of subscores of the document prior to the chosen result set + /// fusion/combination method such as RRF. + /// + /// A new instance for mocking. public static VectorsDebugInfo VectorsDebugInfo(QueryResultDocumentSubscores subscores = null) { - return new VectorsDebugInfo(subscores); + return new VectorsDebugInfo(subscores, serializedAdditionalRawData: null); } - /// Initializes a new instance of . + /// Initializes a new instance of . /// The BM25 or Classic score for the text portion of the query. /// The vector similarity and @search.score values for each vector query. /// The BM25 or Classic score for the text portion of the query. - /// A new instance for mocking. + /// A new instance for mocking. public static QueryResultDocumentSubscores QueryResultDocumentSubscores(TextResult text = null, IEnumerable> vectors = null, double? documentBoost = null) { vectors ??= new List>(); - return new QueryResultDocumentSubscores(text, vectors?.ToList(), documentBoost); + return new QueryResultDocumentSubscores(text, vectors?.ToList(), documentBoost, serializedAdditionalRawData: null); } - /// Initializes a new instance of . + /// Initializes a new instance of . /// The BM25 or Classic score for the text portion of the query. - /// A new instance for mocking. + /// A new instance for mocking. public static TextResult TextResult(double? searchScore = null) { - return new TextResult(searchScore); + return new TextResult(searchScore, serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The @search.score value that is calculated from the vector similarity score. This is the score that's visible in a pure single-field single-vector query. - /// The vector similarity score for this document. Note this is the canonical definition of similarity metric, not the 'distance' version. For example, cosine similarity instead of cosine distance. - /// A new instance for mocking. + /// Initializes a new instance of . + /// + /// The @search.score value that is calculated from the vector similarity score. + /// This is the score that's visible in a pure single-field single-vector query. + /// + /// + /// The vector similarity score for this document. Note this is the canonical + /// definition of similarity metric, not the 'distance' version. For example, + /// cosine similarity instead of cosine distance. + /// + /// A new instance for mocking. public static SingleVectorFieldResult SingleVectorFieldResult(double? searchScore = null, double? 
vectorSimilarity = null) { - return new SingleVectorFieldResult(searchScore, vectorSimilarity); + return new SingleVectorFieldResult(searchScore, vectorSimilarity, serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// Contains debugging information specific to query rewrites. - /// A new instance for mocking. - public static DebugInfo DebugInfo(QueryRewritesDebugInfo queryRewrites = null) + /// Initializes a new instance of . + /// Additional Properties. + /// A new instance for mocking. + public static LookupDocument LookupDocument(IReadOnlyDictionary additionalProperties = null) { - return new DebugInfo(queryRewrites); + additionalProperties ??= new Dictionary(); + + return new LookupDocument(additionalProperties); } - /// Initializes a new instance of . - /// List of query rewrites generated for the text query. - /// List of query rewrites generated for the vectorizable text queries. - /// A new instance for mocking. - public static QueryRewritesDebugInfo QueryRewritesDebugInfo(QueryRewritesValuesDebugInfo text = null, IEnumerable vectors = null) + /// Initializes a new instance of . + /// + /// A value indicating the percentage of the index that was considered by the + /// autocomplete request, or null if minimumCoverage was not specified in the + /// request. + /// + /// The list of returned Autocompleted items. + /// A new instance for mocking. + public static AutocompleteResults AutocompleteResults(double? coverage = null, IEnumerable results = null) { - vectors ??= new List(); + results ??= new List(); - return new QueryRewritesDebugInfo(text, vectors?.ToList()); + return new AutocompleteResults(coverage, results?.ToList(), serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The input text to the generative query rewriting model. There may be cases where the user query and the input to the generative model are not identical. - /// List of query rewrites. - /// A new instance for mocking. - public static QueryRewritesValuesDebugInfo QueryRewritesValuesDebugInfo(string inputQuery = null, IEnumerable rewrites = null) + /// Initializes a new instance of . + /// The name of the suggester. + /// A value indicating the capabilities of the suggester. + /// + /// The list of field names to which the suggester applies. Each field must be + /// searchable. + /// + /// A new instance for mocking. + public static SearchSuggester SearchSuggester(string name = null, SearchSuggesterSearchMode searchMode = default, IEnumerable sourceFields = null) { - rewrites ??= new List(); + sourceFields ??= new List(); - return new QueryRewritesValuesDebugInfo(inputQuery, rewrites?.ToList()); + return new SearchSuggester(name, searchMode, sourceFields?.ToList(), serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// A value indicating the percentage of the index that was considered by the autocomplete request, or null if minimumCoverage was not specified in the request. - /// The list of returned Autocompleted items. - /// A new instance for mocking. - public static AutocompleteResults AutocompleteResults(double? coverage = null, IEnumerable results = null) + /// Initializes a new instance of . + /// The number of documents in the index. + /// The amount of storage in bytes consumed by the index. + /// The amount of memory in bytes consumed by vectors in the index. + /// A new instance for mocking. 
+ public static Search.Documents.Indexes.Models.SearchIndexStatistics SearchIndexStatistics(long documentCount = default, long storageSize = default, long vectorIndexSize = default) { - results ??= new List(); + return new Search.Documents.Indexes.Models.SearchIndexStatistics(documentCount, storageSize, vectorIndexSize, serializedAdditionalRawData: null); + } - return new AutocompleteResults(coverage, results?.ToList()); + /// Initializes a new instance of . + /// The text to break into tokens. + /// + /// The name of the analyzer to use to break the given text. If this parameter is + /// not specified, you must specify a tokenizer instead. The tokenizer and analyzer + /// parameters are mutually exclusive. + /// + /// + /// The name of the tokenizer to use to break the given text. If this parameter is + /// not specified, you must specify an analyzer instead. The tokenizer and analyzer + /// parameters are mutually exclusive. + /// + /// The name of the normalizer to use to normalize the given text. + /// + /// An optional list of token filters to use when breaking the given text. This + /// parameter can only be set when using the tokenizer parameter. + /// + /// + /// An optional list of character filters to use when breaking the given text. This + /// parameter can only be set when using the tokenizer parameter. + /// + /// A new instance for mocking. + public static Search.Documents.Indexes.Models.AnalyzeTextOptions AnalyzeTextOptions(string text = null, LexicalAnalyzerName? analyzerName = null, LexicalTokenizerName? tokenizerName = null, LexicalNormalizerName? normalizerName = null, IEnumerable tokenFilters = null, IEnumerable charFilters = null) + { + tokenFilters ??= new List(); + charFilters ??= new List(); + + return new Search.Documents.Indexes.Models.AnalyzeTextOptions( + text, + analyzerName, + tokenizerName, + normalizerName, + tokenFilters?.ToList(), + charFilters?.ToList(), + serializedAdditionalRawData: null); } - /// Initializes a new instance of . + /// Initializes a new instance of . + /// The name of the synonym map. + /// The format of the synonym map. Only the 'solr' format is currently supported. + /// + /// A series of synonym rules in the specified synonym map format. The rules must + /// be separated by newlines. + /// + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your data when + /// you want full assurance that no one, not even Microsoft, can decrypt your data. + /// Once you have encrypted your data, it will always remain encrypted. The search + /// service will ignore attempts to set this property to null. You can change this + /// property as needed if you want to rotate your encryption key; Your data will be + /// unaffected. Encryption with customer-managed keys is not available for free + /// search services, and is only available for paid services created on or after + /// January 1, 2019. + /// + /// The ETag of the synonym map. + /// A new instance for mocking. + public static SynonymMap SynonymMap(string name = null, SynonymMapFormat format = default, string synonyms = null, SearchResourceEncryptionKey encryptionKey = null, string eTag = null) + { + return new SynonymMap( + name, + format, + synonyms, + encryptionKey, + eTag, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The synonym maps in the Search service. + /// A new instance for mocking. 
+ public static ListSynonymMapsResult ListSynonymMapsResult(IEnumerable synonymMaps = null) + { + synonymMaps ??= new List(); + + return new ListSynonymMapsResult(synonymMaps?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The skillsets defined in the Search service. + /// A new instance for mocking. + public static ListSkillsetsResult ListSkillsetsResult(IEnumerable skillsets = null) + { + skillsets ??= new List(); + + return new ListSkillsetsResult(skillsets?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The indexers in the Search service. + /// A new instance for mocking. + public static ListIndexersResult ListIndexersResult(IEnumerable indexers = null) + { + indexers ??= new List(); + + return new ListIndexersResult(indexers?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . /// Overall indexer status. /// The result of the most recent or an in-progress indexer execution. /// History of the recent indexer executions, sorted in reverse chronological order. /// The execution limits for the indexer. - /// A new instance for mocking. + /// A new instance for mocking. public static SearchIndexerStatus SearchIndexerStatus(IndexerStatus status = default, IndexerExecutionResult lastResult = null, IEnumerable executionHistory = null, SearchIndexerLimits limits = null) { executionHistory ??= new List(); - return new SearchIndexerStatus(status, lastResult, executionHistory?.ToList(), limits); + return new SearchIndexerStatus(status, lastResult, executionHistory?.ToList(), limits, serializedAdditionalRawData: null); } - /// Initializes a new instance of . + /// Initializes a new instance of . /// The outcome of this indexer execution. /// The outcome of this indexer execution. /// All of the state that defines and dictates the indexer's current execution. @@ -196,12 +424,16 @@ public static SearchIndexerStatus SearchIndexerStatus(IndexerStatus status = def /// The end time of this indexer execution, if the execution has already completed. /// The item-level indexing errors. /// The item-level indexing warnings. - /// The number of items that were processed during this indexer execution. This includes both successfully processed items and items where indexing was attempted but failed. + /// + /// The number of items that were processed during this indexer execution. This + /// includes both successfully processed items and items where indexing was + /// attempted but failed. + /// /// The number of items that failed to be indexed during this indexer execution. /// Change tracking state with which an indexer execution started. /// Change tracking state with which an indexer execution finished. - /// A new instance for mocking. - public static IndexerExecutionResult IndexerExecutionResult(IndexerExecutionStatus status = default, IndexerExecutionStatusDetail? statusDetail = null, IndexerState currentState = null, string errorMessage = null, DateTimeOffset? startTime = null, DateTimeOffset? endTime = null, IEnumerable errors = null, IEnumerable warnings = null, int itemCount = default, int failedItemCount = default, string initialTrackingState = null, string finalTrackingState = null) + /// A new instance for mocking. + public static IndexerExecutionResult IndexerExecutionResult(IndexerExecutionStatus status = default, IndexerExecutionStatusDetail? 
statusDetail = null, Search.Documents.Indexes.Models.IndexerState currentState = (Search.Documents.Indexes.Models.IndexerState)null, string errorMessage = null, DateTimeOffset? startTime = null, DateTimeOffset? endTime = null, IEnumerable errors = null, IEnumerable warnings = null, int itemCount = default, int failedItemCount = default, string initialTrackingState = null, string finalTrackingState = null) { errors ??= new List(); warnings ??= new List(); @@ -218,92 +450,18 @@ public static IndexerExecutionResult IndexerExecutionResult(IndexerExecutionStat itemCount, failedItemCount, initialTrackingState, - finalTrackingState); + finalTrackingState, + serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The number of documents in the index. - /// The amount of storage in bytes consumed by the index. - /// The amount of memory in bytes consumed by vectors in the index. - /// A new instance for mocking. - public static SearchIndexStatistics SearchIndexStatistics(long documentCount = default, long storageSize = default, long vectorIndexSize = default) + /// Initializes a new instance of . + /// The datasources in the Search service. + /// A new instance for mocking. + public static ListDataSourcesResult ListDataSourcesResult(IEnumerable dataSources = null) { - return new SearchIndexStatistics(documentCount, storageSize, vectorIndexSize); - } - - /// Initializes a new instance of . - /// Total number of aliases. - /// Total number of documents across all indexes in the service. - /// Total number of indexes. - /// Total number of indexers. - /// Total number of data sources. - /// Total size of used storage in bytes. - /// Total number of synonym maps. - /// Total number of skillsets. - /// Total memory consumption of all vector indexes within the service, in bytes. - /// , , , , , , , or is null. - /// A new instance for mocking. 
- public static SearchServiceCounters SearchServiceCounters(SearchResourceCounter aliasCounter = null, SearchResourceCounter documentCounter = null, SearchResourceCounter indexCounter = null, SearchResourceCounter indexerCounter = null, SearchResourceCounter dataSourceCounter = null, SearchResourceCounter storageSizeCounter = null, SearchResourceCounter synonymMapCounter = null, SearchResourceCounter skillsetCounter = null, SearchResourceCounter vectorIndexSizeCounter = null) - { - if (aliasCounter == null) - { - throw new ArgumentNullException(nameof(aliasCounter)); - } - if (documentCounter == null) - { - throw new ArgumentNullException(nameof(documentCounter)); - } - if (indexCounter == null) - { - throw new ArgumentNullException(nameof(indexCounter)); - } - if (indexerCounter == null) - { - throw new ArgumentNullException(nameof(indexerCounter)); - } - if (dataSourceCounter == null) - { - throw new ArgumentNullException(nameof(dataSourceCounter)); - } - if (storageSizeCounter == null) - { - throw new ArgumentNullException(nameof(storageSizeCounter)); - } - if (synonymMapCounter == null) - { - throw new ArgumentNullException(nameof(synonymMapCounter)); - } - if (skillsetCounter == null) - { - throw new ArgumentNullException(nameof(skillsetCounter)); - } - if (vectorIndexSizeCounter == null) - { - throw new ArgumentNullException(nameof(vectorIndexSizeCounter)); - } - - return new SearchServiceCounters( - aliasCounter, - documentCounter, - indexCounter, - indexerCounter, - dataSourceCounter, - storageSizeCounter, - synonymMapCounter, - skillsetCounter, - vectorIndexSizeCounter); - } + dataSources ??= new List(); - /// Initializes a new instance of . - /// The maximum allowed fields per index. - /// The maximum depth which you can nest sub-fields in an index, including the top-level complex field. For example, a/b/c has a nesting depth of 3. - /// The maximum number of fields of type Collection(Edm.ComplexType) allowed in an index. - /// The maximum number of objects in complex collections allowed per document. - /// The maximum amount of storage in bytes allowed per index. - /// A new instance for mocking. - public static SearchServiceLimits SearchServiceLimits(int? maxFieldsPerIndex = null, int? maxFieldNestingDepthPerIndex = null, int? maxComplexCollectionFieldsPerIndex = null, int? maxComplexObjectsInCollectionsPerDocument = null, long? 
maxStoragePerIndexInBytes = null) - { - return new SearchServiceLimits(maxFieldsPerIndex, maxFieldNestingDepthPerIndex, maxComplexCollectionFieldsPerIndex, maxComplexObjectsInCollectionsPerDocument, maxStoragePerIndexInBytes); + return new ListDataSourcesResult(dataSources?.ToList(), serializedAdditionalRawData: null); } } } diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchOptions.Serialization.cs similarity index 80% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SearchOptions.Serialization.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SearchOptions.Serialization.cs index 6faee393e77c..12ce9a022c47 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchOptions.Serialization.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchOptions.Serialization.cs @@ -5,6 +5,8 @@ #nullable disable +using System; +using System.ClientModel.Primitives; using System.Collections.Generic; using System.Text.Json; using Azure.Core; @@ -12,11 +14,27 @@ namespace Azure.Search.Documents { - public partial class SearchOptions : IUtf8JsonSerializable + public partial class SearchOptions : IUtf8JsonSerializable, IJsonModel { - void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchOptions)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(IncludeTotalCount)) { writer.WritePropertyName("count"u8); @@ -70,7 +88,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(ScoringStatistics)) { writer.WritePropertyName("scoringStatistics"u8); - writer.WriteStringValue(ScoringStatistics.Value.ToSerialString()); + writer.WriteStringValue(ScoringStatistics.Value.ToString()); } if (Optional.IsDefined(SessionId)) { @@ -110,7 +128,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(SearchMode)) { writer.WritePropertyName("searchMode"u8); - writer.WriteStringValue(SearchMode.Value.ToSerialString()); + writer.WriteStringValue(SearchMode.Value.ToString()); } if (Optional.IsDefined(QueryLanguage)) { @@ -149,15 +167,8 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) } if (Optional.IsDefined(SemanticMaxWaitInMilliseconds)) { - if (SemanticMaxWaitInMilliseconds != null) - { - writer.WritePropertyName("semanticMaxWaitInMilliseconds"u8); - writer.WriteNumberValue(SemanticMaxWaitInMilliseconds.Value); - } - else - { - writer.WriteNull("semanticMaxWaitInMilliseconds"); - } + writer.WritePropertyName("semanticMaxWaitInMilliseconds"u8); + writer.WriteNumberValue(SemanticMaxWaitInMilliseconds.Value); } if (Optional.IsDefined(SemanticQuery)) { @@ -190,7 +201,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WriteStartArray(); foreach (var item in VectorQueries) { - writer.WriteObjectValue(item); + writer.WriteObjectValue(item, options); } writer.WriteEndArray(); } @@ -202,13 +213,41 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(HybridSearch)) { writer.WritePropertyName("hybridSearch"u8); - writer.WriteObjectValue(HybridSearch); + writer.WriteObjectValue(HybridSearch, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } } - writer.WriteEndObject(); } - internal static SearchOptions DeserializeSearchOptions(JsonElement element) + SearchOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchOptions(document.RootElement, options); + } + + internal static SearchOptions DeserializeSearchOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + if (element.ValueKind == JsonValueKind.Null) { return null; @@ -246,6 +285,8 @@ internal static SearchOptions DeserializeSearchOptions(JsonElement element) IList vectorQueries = default; VectorFilterMode? 
vectorFilterMode = default; HybridSearch hybridSearch = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) { if (property.NameEquals("count"u8)) @@ -320,7 +361,7 @@ internal static SearchOptions DeserializeSearchOptions(JsonElement element) { continue; } - scoringStatistics = property.Value.GetString().ToScoringStatistics(); + scoringStatistics = new ScoringStatistics(property.Value.GetString()); continue; } if (property.NameEquals("sessionId"u8)) @@ -372,7 +413,7 @@ internal static SearchOptions DeserializeSearchOptions(JsonElement element) { continue; } - searchMode = property.Value.GetString().ToSearchMode(); + searchMode = new SearchMode(property.Value.GetString()); continue; } if (property.NameEquals("queryLanguage"u8)) @@ -434,7 +475,6 @@ internal static SearchOptions DeserializeSearchOptions(JsonElement element) { if (property.Value.ValueKind == JsonValueKind.Null) { - semanticMaxWaitInMilliseconds = null; continue; } semanticMaxWaitInMilliseconds = property.Value.GetInt32(); @@ -474,7 +514,7 @@ internal static SearchOptions DeserializeSearchOptions(JsonElement element) List array = new List(); foreach (var item in property.Value.EnumerateArray()) { - array.Add(VectorQuery.DeserializeVectorQuery(item)); + array.Add(VectorQuery.DeserializeVectorQuery(item, options)); } vectorQueries = array; continue; @@ -494,10 +534,15 @@ internal static SearchOptions DeserializeSearchOptions(JsonElement element) { continue; } - hybridSearch = HybridSearch.DeserializeHybridSearch(property.Value); + hybridSearch = HybridSearch.DeserializeHybridSearch(property.Value, options); continue; } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } } + serializedAdditionalRawData = rawDataDictionary; return new SearchOptions( count, facets ?? new ChangeTrackingList(), @@ -531,9 +576,41 @@ internal static SearchOptions DeserializeSearchOptions(JsonElement element) semanticFields, vectorQueries ?? new ChangeTrackingList(), vectorFilterMode, - hybridSearch); + hybridSearch, + serializedAdditionalRawData); } + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchOptions)} does not support writing '{options.Format}' format."); + } + } + + SearchOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + /// Deserializes the model from a raw response. /// The response to deserialize the model from. 
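Because SearchOptions now implements IJsonModel and IPersistableModel of itself (the generic type arguments are elided in the rendering above), it can be round-tripped through System.ClientModel's ModelReaderWriter. A minimal sketch, independent of any service call; the property values are placeholders:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

var options = new SearchOptions
{
    SessionId = "session-1", // sticky-session hint; must not start with '_'
    MinimumCoverage = 100,   // percentage of the index that must be covered
};

// "J" is the JSON format the IJsonModel implementation above supports.
BinaryData payload = ModelReaderWriter.Write(options, ModelReaderWriterOptions.Json);
SearchOptions roundTripped = ModelReaderWriter.Read<SearchOptions>(payload, ModelReaderWriterOptions.Json);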
internal static SearchOptions FromResponse(Response response) @@ -546,7 +623,7 @@ internal static SearchOptions FromResponse(Response response) internal virtual RequestContent ToRequestContent() { var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(this); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); return content; } } diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchOptions.cs new file mode 100644 index 000000000000..caa9a76da4a0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchOptions.cs @@ -0,0 +1,306 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Models; + +namespace Azure.Search.Documents +{ + /// + /// Parameters for filtering, sorting, faceting, paging, and other search query + /// behaviors. + /// + public partial class SearchOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public SearchOptions() + { + Facets = new ChangeTrackingList(); + ScoringParameters = new ChangeTrackingList(); + VectorQueries = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// A value that specifies whether to fetch the total count of results. Default is + /// false. Setting this value to true may have a performance impact. Note that the + /// count returned is an approximation. + /// + /// + /// The list of facet expressions to apply to the search query. Each facet + /// expression contains a field name, optionally followed by a comma-separated list + /// of name:value pairs. + /// + /// The OData $filter expression to apply to the search query. + /// + /// The comma-separated list of field names to use for hit highlights. Only + /// searchable fields can be used for hit highlighting. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. Default is </em>. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. Default is <em>. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a search query in order for the query to be reported as a success. + /// This parameter can be useful for ensuring search availability even for services + /// with only one replica. The default is 100. + /// + /// + /// The comma-separated list of OData $orderby expressions by which to sort the + /// results. Each expression can be either a field name or a call to either the + /// geo.distance() or the search.score() functions. 
Each expression can be followed + /// by asc to indicate ascending, or desc to indicate descending. The default is + /// ascending order. Ties will be broken by the match scores of documents. If no + /// $orderby is specified, the default sort order is descending by document match + /// score. There can be at most 32 $orderby clauses. + /// + /// + /// A value that specifies the syntax of the search query. The default is 'simple'. + /// Use 'full' if your query uses the Lucene query syntax. + /// + /// + /// A value that specifies whether we want to calculate scoring statistics (such as + /// document frequency) globally for more consistent scoring, or locally, for lower + /// latency. The default is 'local'. Use 'global' to aggregate scoring statistics + /// globally before scoring. Using global scoring statistics can increase latency + /// of search queries. + /// + /// + /// A value to be used to create a sticky session, which can help getting more + /// consistent results. As long as the same sessionId is used, a best-effort + /// attempt will be made to target the same replica set. Be wary that reusing the + /// same sessionID values repeatedly can interfere with the load balancing of the + /// requests across replicas and adversely affect the performance of the search + /// service. The value used as sessionId cannot start with a '_' character. + /// + /// + /// The list of parameter values to be used in scoring functions (for example, + /// referencePointParameter) using the format name-values. For example, if the + /// scoring profile defines a function with a parameter called 'mylocation' the + /// parameter string would be "mylocation--122.2,44.8" (without the quotes). + /// + /// + /// The name of a scoring profile to evaluate match scores for matching documents + /// in order to sort the results. + /// + /// + /// Enables a debugging tool that can be used to further explore your reranked + /// results. + /// + /// + /// A full-text search query expression; Use "*" or omit this parameter to match + /// all documents. + /// + /// + /// The comma-separated list of field names to which to scope the full-text search. + /// When using fielded search (fieldName:searchExpression) in a full Lucene query, + /// the field names of each fielded search expression take precedence over any + /// field names listed in this parameter. + /// + /// + /// A value that specifies whether any or all of the search terms must be matched + /// in order to count the document as a match. + /// + /// A value that specifies the language of the search query. + /// + /// A value that specified the type of the speller to use to spell-correct + /// individual search query terms. + /// + /// + /// The comma-separated list of fields to retrieve. If unspecified, all fields + /// marked as retrievable in the schema are included. + /// + /// + /// The number of search results to skip. This value cannot be greater than + /// 100,000. If you need to scan documents in sequence, but cannot use skip due to + /// this limitation, consider using orderby on a totally-ordered key and filter + /// with a range query instead. + /// + /// + /// The number of search results to retrieve. This can be used in conjunction with + /// $skip to implement client-side paging of search results. If results are + /// truncated due to server-side paging, the response will include a continuation + /// token that can be used to issue another Search request for the next page of + /// results. 
+ /// + /// + /// The name of a semantic configuration that will be used when processing + /// documents for queries of type semantic. + /// + /// + /// Allows the user to choose whether a semantic call should fail completely + /// (default / current behavior), or to return partial results. + /// + /// + /// Allows the user to set an upper bound on the amount of time it takes for + /// semantic enrichment to finish processing before the request fails. + /// + /// + /// Allows setting a separate search query that will be solely used for semantic + /// reranking, semantic captions and semantic answers. Is useful for scenarios + /// where there is a need to use different queries between the base retrieval and + /// ranking phase, and the L2 semantic phase. + /// + /// + /// A value that specifies whether answers should be returned as part of the search + /// response. + /// + /// + /// A value that specifies whether captions should be returned as part of the + /// search response. + /// + /// + /// A value that specifies whether query rewrites should be generated to augment + /// the search query. + /// + /// The comma-separated list of field names used for semantic ranking. + /// + /// The query parameters for vector and hybrid search queries. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + /// + /// Determines whether or not filters are applied before or after the vector search + /// is performed. Default is 'preFilter' for new indexes. + /// + /// The query parameters to configure hybrid search behaviors. + /// Keeps track of any properties unknown to the library. + internal SearchOptions(bool? includeTotalCount, IList facets, string filter, string highlightFieldsRaw, string highlightPostTag, string highlightPreTag, double? minimumCoverage, string orderByRaw, SearchQueryType? queryType, ScoringStatistics? scoringStatistics, string sessionId, IList scoringParameters, string scoringProfile, QueryDebugMode? debug, string searchText, string searchFieldsRaw, SearchMode? searchMode, QueryLanguage? queryLanguage, QuerySpellerType? querySpeller, string selectRaw, int? skip, int? size, string semanticConfigurationName, SemanticErrorMode? semanticErrorMode, int? semanticMaxWaitInMilliseconds, string semanticQuery, string queryAnswerRaw, string queryCaptionRaw, string queryRewritesRaw, string semanticFieldsRaw, IList vectorQueries, VectorFilterMode? 
filterMode, HybridSearch hybridSearch, IDictionary serializedAdditionalRawData) + { + IncludeTotalCount = includeTotalCount; + Facets = facets; + Filter = filter; + HighlightFieldsRaw = highlightFieldsRaw; + HighlightPostTag = highlightPostTag; + HighlightPreTag = highlightPreTag; + MinimumCoverage = minimumCoverage; + OrderByRaw = orderByRaw; + QueryType = queryType; + ScoringStatistics = scoringStatistics; + SessionId = sessionId; + ScoringParameters = scoringParameters; + ScoringProfile = scoringProfile; + Debug = debug; + SearchText = searchText; + SearchFieldsRaw = searchFieldsRaw; + SearchMode = searchMode; + QueryLanguage = queryLanguage; + QuerySpeller = querySpeller; + SelectRaw = selectRaw; + Skip = skip; + Size = size; + SemanticConfigurationName = semanticConfigurationName; + SemanticErrorMode = semanticErrorMode; + SemanticMaxWaitInMilliseconds = semanticMaxWaitInMilliseconds; + SemanticQuery = semanticQuery; + QueryAnswerRaw = queryAnswerRaw; + QueryCaptionRaw = queryCaptionRaw; + QueryRewritesRaw = queryRewritesRaw; + SemanticFieldsRaw = semanticFieldsRaw; + VectorQueries = vectorQueries; + FilterMode = filterMode; + HybridSearch = hybridSearch; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. Default is </em>. + /// + public string HighlightPostTag { get; set; } + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. Default is <em>. + /// + public string HighlightPreTag { get; set; } + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a search query in order for the query to be reported as a success. + /// This parameter can be useful for ensuring search availability even for services + /// with only one replica. The default is 100. + /// + public double? MinimumCoverage { get; set; } + /// + /// A value that specifies the syntax of the search query. The default is 'simple'. + /// Use 'full' if your query uses the Lucene query syntax. + /// + public SearchQueryType? QueryType { get; set; } + /// + /// A value that specifies whether we want to calculate scoring statistics (such as + /// document frequency) globally for more consistent scoring, or locally, for lower + /// latency. The default is 'local'. Use 'global' to aggregate scoring statistics + /// globally before scoring. Using global scoring statistics can increase latency + /// of search queries. + /// + public ScoringStatistics? ScoringStatistics { get; set; } + /// + /// A value to be used to create a sticky session, which can help getting more + /// consistent results. As long as the same sessionId is used, a best-effort + /// attempt will be made to target the same replica set. Be wary that reusing the + /// same sessionID values repeatedly can interfere with the load balancing of the + /// requests across replicas and adversely affect the performance of the search + /// service. The value used as sessionId cannot start with a '_' character. + /// + public string SessionId { get; set; } + /// + /// The name of a scoring profile to evaluate match scores for matching documents + /// in order to sort the results. + /// + public string ScoringProfile { get; set; } + /// + /// A value that specifies whether any or all of the search terms must be matched + /// in order to count the document as a match. + /// + public SearchMode? 
SearchMode { get; set; } + /// + /// The number of search results to skip. This value cannot be greater than + /// 100,000. If you need to scan documents in sequence, but cannot use skip due to + /// this limitation, consider using orderby on a totally-ordered key and filter + /// with a range query instead. + /// + public int? Skip { get; set; } + /// The query parameters to configure hybrid search behaviors. + public HybridSearch HybridSearch { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SearchQueryType.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchQueryType.Serialization.cs similarity index 100% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SearchQueryType.Serialization.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SearchQueryType.Serialization.cs diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceCounter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceCounter.Serialization.cs new file mode 100644 index 000000000000..36542de39e5a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceCounter.Serialization.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + public partial class SearchResourceCounter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchResourceCounter)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("usage"u8); + writer.WriteNumberValue(Usage); + if (Optional.IsDefined(Quota)) + { + writer.WritePropertyName("quota"u8); + writer.WriteNumberValue(Quota.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + Search.Documents.Indexes.Models.SearchResourceCounter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
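The settable properties surfaced above (hit-highlight tags, minimum coverage, sticky sessions, skip-based paging) are typically configured on the options passed to a search call. A hedged usage sketch: the "Hotel" type, the endpoint, index name, and key are placeholders, and Size is the page-size property on the public options type, which is not shown in this hunk:

using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

var searchClient = new SearchClient(
    new Uri("https://<service>.search.windows.net"), "hotels", new AzureKeyCredential("<key>"));

var options = new SearchOptions
{
    HighlightPreTag = "<em>",       // prepended to each hit highlight
    HighlightPostTag = "</em>",     // appended to each hit highlight
    MinimumCoverage = 100,          // require full index coverage for success
    SessionId = "user-42-session",  // best-effort sticky replica routing
    Skip = 0,
    Size = 25,                      // page size; combine with Skip for paging
};

Response<SearchResults<Hotel>> page = await searchClient.SearchAsync<Hotel>("ocean view", options);

public class Hotel { public string HotelName { get; set; } }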
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchResourceCounter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.SearchResourceCounter DeserializeSearchResourceCounter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + long usage = default; + long? quota = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("usage"u8)) + { + usage = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("quota"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + quota = property.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.SearchResourceCounter(usage, quota, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchResourceCounter)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.SearchResourceCounter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SearchResourceCounter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static Search.Documents.Indexes.Models.SearchResourceCounter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceCounter.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceCounter.cs new file mode 100644 index 000000000000..1a8458537e95 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceCounter.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// Represents a resource's usage and quota. + public partial class SearchResourceCounter + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The resource usage amount. + internal SearchResourceCounter(long usage) + { + Usage = usage; + } + + /// Initializes a new instance of . + /// The resource usage amount. + /// The resource amount quota. + /// Keeps track of any properties unknown to the library. + internal SearchResourceCounter(long usage, long? quota, IDictionary serializedAdditionalRawData) + { + Usage = usage; + Quota = quota; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchResourceCounter() + { + } + + /// The resource usage amount. + public long Usage { get; } + /// The resource amount quota. + public long? Quota { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceEncryptionKey.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceEncryptionKey.Serialization.cs new file mode 100644 index 000000000000..e33c9768c0d8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceEncryptionKey.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchResourceEncryptionKey : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
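SearchResourceCounter pairs an absolute Usage figure with an optional Quota, where a null Quota means the resource is not capped. A tiny hedged sketch of how a caller might report remaining headroom; the counter instance would come from service statistics, which are outside this hunk:

using Azure.Search.Documents.Indexes.Models;

static string DescribeCounter(SearchResourceCounter counter) =>
    counter.Quota is long quota
        ? $"{counter.Usage}/{quota} used ({quota - counter.Usage} remaining)"
        : $"{counter.Usage} used (no quota)";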
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchResourceEncryptionKey)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("keyVaultKeyName"u8); + writer.WriteStringValue(KeyName); + if (Optional.IsDefined(KeyVersion)) + { + writer.WritePropertyName("keyVaultKeyVersion"u8); + writer.WriteStringValue(KeyVersion); + } + writer.WritePropertyName("keyVaultUri"u8); + writer.WriteStringValue(VaultUri); + if (Optional.IsDefined(AccessCredentials)) + { + writer.WritePropertyName("accessCredentials"u8); + writer.WriteObjectValue(AccessCredentials, options); + } + if (Optional.IsDefined(Identity)) + { + writer.WritePropertyName("identity"u8); + writer.WriteObjectValue(Identity, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchResourceEncryptionKey IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchResourceEncryptionKey)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchResourceEncryptionKey(document.RootElement, options); + } + + internal static SearchResourceEncryptionKey DeserializeSearchResourceEncryptionKey(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string keyVaultKeyName = default; + string keyVaultKeyVersion = default; + string keyVaultUri = default; + AzureActiveDirectoryApplicationCredentials accessCredentials = default; + SearchIndexerDataIdentity identity = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("keyVaultKeyName"u8)) + { + keyVaultKeyName = property.Value.GetString(); + continue; + } + if (property.NameEquals("keyVaultKeyVersion"u8)) + { + keyVaultKeyVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("keyVaultUri"u8)) + { + keyVaultUri = property.Value.GetString(); + continue; + } + if (property.NameEquals("accessCredentials"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + accessCredentials = AzureActiveDirectoryApplicationCredentials.DeserializeAzureActiveDirectoryApplicationCredentials(property.Value, options); + continue; + } + if (property.NameEquals("identity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + identity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (options.Format != "W") + { + 
rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchResourceEncryptionKey( + keyVaultKeyName, + keyVaultKeyVersion, + keyVaultUri, + accessCredentials, + identity, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchResourceEncryptionKey)} does not support writing '{options.Format}' format."); + } + } + + SearchResourceEncryptionKey IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchResourceEncryptionKey(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchResourceEncryptionKey)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchResourceEncryptionKey FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchResourceEncryptionKey(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceEncryptionKey.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceEncryptionKey.cs new file mode 100644 index 000000000000..39d63b72210d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchResourceEncryptionKey.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A customer-managed encryption key in Azure Key Vault. Keys that you create and + /// manage can be used to encrypt or decrypt data-at-rest, such as indexes and + /// synonym maps. + /// + public partial class SearchResourceEncryptionKey + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. 
+ /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of your Azure Key Vault key to be used to encrypt your data at rest. + /// + /// The URI of your Azure Key Vault, also referred to as DNS name, that contains + /// the key to be used to encrypt your data at rest. An example URI might be + /// `https://my-keyvault-name.vault.azure.net`. + /// + /// or is null. + public SearchResourceEncryptionKey(string keyName, string vaultUri) + { + Argument.AssertNotNull(keyName, nameof(keyName)); + Argument.AssertNotNull(vaultUri, nameof(vaultUri)); + + KeyName = keyName; + VaultUri = vaultUri; + } + + /// Initializes a new instance of . + /// The name of your Azure Key Vault key to be used to encrypt your data at rest. + /// The version of your Azure Key Vault key to be used to encrypt your data at rest. + /// + /// The URI of your Azure Key Vault, also referred to as DNS name, that contains + /// the key to be used to encrypt your data at rest. An example URI might be + /// `https://my-keyvault-name.vault.azure.net`. + /// + /// + /// Optional Azure Active Directory credentials used for accessing your Azure Key + /// Vault. Not required if using managed identity instead. + /// + /// + /// An explicit managed identity to use for this encryption key. If not specified + /// and the access credentials property is null, the system-assigned managed + /// identity is used. On update to the resource, if the explicit identity is + /// unspecified, it remains unchanged. If "none" is specified, the value of this + /// property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// Keeps track of any properties unknown to the library. + internal SearchResourceEncryptionKey(string keyName, string keyVersion, string vaultUri, AzureActiveDirectoryApplicationCredentials accessCredentials, SearchIndexerDataIdentity identity, IDictionary serializedAdditionalRawData) + { + KeyName = keyName; + KeyVersion = keyVersion; + VaultUri = vaultUri; + AccessCredentials = accessCredentials; + Identity = identity; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchResourceEncryptionKey() + { + } + + /// The name of your Azure Key Vault key to be used to encrypt your data at rest. + public string KeyName { get; set; } + /// The version of your Azure Key Vault key to be used to encrypt your data at rest. + public string KeyVersion { get; set; } + /// + /// The URI of your Azure Key Vault, also referred to as DNS name, that contains + /// the key to be used to encrypt your data at rest. An example URI might be + /// `https://my-keyvault-name.vault.azure.net`. + /// + public string VaultUri { get; set; } + /// + /// Optional Azure Active Directory credentials used for accessing your Azure Key + /// Vault. Not required if using managed identity instead. + /// + public AzureActiveDirectoryApplicationCredentials AccessCredentials { get; set; } + /// + /// An explicit managed identity to use for this encryption key. If not specified + /// and the access credentials property is null, the system-assigned managed + /// identity is used. On update to the resource, if the explicit identity is + /// unspecified, it remains unchanged. 
If "none" is specified, the value of this + /// property is cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity Identity { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchResult.Serialization.cs new file mode 100644 index 000000000000..034a4a1e8ad2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchResult.Serialization.cs @@ -0,0 +1,255 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + internal partial class SearchResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@search.score"u8); + writer.WriteNumberValue(Score); + if (Optional.IsDefined(RerankerScore)) + { + writer.WritePropertyName("@search.rerankerScore"u8); + writer.WriteNumberValue(RerankerScore.Value); + } + if (Optional.IsCollectionDefined(Highlights)) + { + writer.WritePropertyName("@search.highlights"u8); + writer.WriteStartObject(); + foreach (var item in Highlights) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStartArray(); + foreach (var item0 in item.Value) + { + writer.WriteStringValue(item0); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + if (Optional.IsCollectionDefined(Captions)) + { + writer.WritePropertyName("@search.captions"u8); + writer.WriteStartArray(); + foreach (var item in Captions) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && Optional.IsCollectionDefined(DocumentDebugInfo)) + { + writer.WritePropertyName("@search.documentDebugInfo"u8); + writer.WriteStartArray(); + foreach (var item in DocumentDebugInfo) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + SearchResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + 
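As generated in this hunk, the public SearchResourceEncryptionKey constructor takes only the key name and vault URI, with the key version, access credentials, and identity settable afterwards. A minimal construction sketch; the key name and version are placeholders, and the vault URI follows the example given in the doc comments above:

using Azure.Search.Documents;

var encryptionKey = new SearchResourceEncryptionKey(
    keyName: "my-cmk",
    vaultUri: "https://my-keyvault-name.vault.azure.net")
{
    KeyVersion = "<key-version>", // optional; serialized as keyVaultKeyVersion only when set
};

// The key can then be assigned wherever the definitions earlier in this diff
// accept an encryptionKey (for example, a SynonymMap's EncryptionKey).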
var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchResult(document.RootElement, options); + } + + internal static SearchResult DeserializeSearchResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double searchScore = default; + double? searchRerankerScore = default; + IReadOnlyDictionary> searchHighlights = default; + IReadOnlyList searchCaptions = default; + IReadOnlyList searchDocumentDebugInfo = default; + IReadOnlyDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@search.score"u8)) + { + searchScore = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("@search.rerankerScore"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchRerankerScore = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("@search.highlights"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary> dictionary = new Dictionary>(); + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property0.Name, null); + } + else + { + List array = new List(); + foreach (var item in property0.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + dictionary.Add(property0.Name, array); + } + } + searchHighlights = dictionary; + continue; + } + if (property.NameEquals("@search.captions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(QueryCaptionResult.DeserializeQueryCaptionResult(item, options)); + } + searchCaptions = array; + continue; + } + if (property.NameEquals("@search.documentDebugInfo"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.DocumentDebugInfo.DeserializeDocumentDebugInfo(item, options)); + } + searchDocumentDebugInfo = array; + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SearchResult( + searchScore, + searchRerankerScore, + searchHighlights ?? new ChangeTrackingDictionary>(), + searchCaptions ?? new ChangeTrackingList(), + searchDocumentDebugInfo ?? new ChangeTrackingList(), + additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchResult)} does not support writing '{options.Format}' format."); + } + } + + SearchResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchResult.cs new file mode 100644 index 000000000000..a379c9237327 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchResult.cs @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Models +{ + /// Contains a document found by a search query, plus associated metadata. + internal partial class SearchResult + { + /// Initializes a new instance of . + /// + /// The relevance score of the document compared to other documents returned by the + /// query. + /// + internal SearchResult(double score) + { + Score = score; + Highlights = new ChangeTrackingDictionary>(); + Captions = new ChangeTrackingList(); + DocumentDebugInfo = new ChangeTrackingList(); + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// + /// The relevance score of the document compared to other documents returned by the + /// query. + /// + /// + /// The relevance score computed by the semantic ranker for the top search results. + /// Search results are sorted by the RerankerScore first and then by the Score. + /// RerankerScore is only returned for queries of type 'semantic'. + /// + /// + /// Text fragments from the document that indicate the matching search terms, + /// organized by each applicable field; null if hit highlighting was not enabled + /// for the query. + /// + /// + /// Captions are the most representative passages from the document relatively to + /// the search query. They are often used as document summary. Captions are only + /// returned for queries of type 'semantic'. + /// + /// + /// Contains debugging information that can be used to further explore your search + /// results. + /// + /// Additional Properties. 
+ internal SearchResult(double score, double? rerankerScore, IReadOnlyDictionary> highlights, IReadOnlyList captions, IReadOnlyList documentDebugInfo, IReadOnlyDictionary additionalProperties) + { + Score = score; + RerankerScore = rerankerScore; + Highlights = highlights; + Captions = captions; + DocumentDebugInfo = documentDebugInfo; + AdditionalProperties = additionalProperties; + } + + /// Initializes a new instance of for deserialization. + internal SearchResult() + { + } + + /// + /// The relevance score of the document compared to other documents returned by the + /// query. + /// + public double Score { get; } + /// + /// The relevance score computed by the semantic ranker for the top search results. + /// Search results are sorted by the RerankerScore first and then by the Score. + /// RerankerScore is only returned for queries of type 'semantic'. + /// + public double? RerankerScore { get; } + /// + /// Text fragments from the document that indicate the matching search terms, + /// organized by each applicable field; null if hit highlighting was not enabled + /// for the query. + /// + public IReadOnlyDictionary> Highlights { get; } + /// + /// Captions are the most representative passages from the document relatively to + /// the search query. They are often used as document summary. Captions are only + /// returned for queries of type 'semantic'. + /// + public IReadOnlyList Captions { get; } + /// + /// Contains debugging information that can be used to further explore your search + /// results. + /// + public IReadOnlyList DocumentDebugInfo { get; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IReadOnlyDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchScoreThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchScoreThreshold.Serialization.cs new file mode 100644 index 000000000000..69f94602681c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchScoreThreshold.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class SearchScoreThreshold : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchScoreThreshold)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("value"u8); + writer.WriteNumberValue(Value); + } + + SearchScoreThreshold IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchScoreThreshold)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchScoreThreshold(document.RootElement, options); + } + + internal static SearchScoreThreshold DeserializeSearchScoreThreshold(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double value = default; + VectorThresholdKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + value = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorThresholdKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchScoreThreshold(kind, serializedAdditionalRawData, value); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchScoreThreshold)} does not support writing '{options.Format}' format."); + } + } + + SearchScoreThreshold IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchScoreThreshold(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchScoreThreshold)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SearchScoreThreshold FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchScoreThreshold(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchScoreThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchScoreThreshold.cs new file mode 100644 index 000000000000..a81b7a326e07 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchScoreThreshold.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The results of the vector query will filter based on the '@search.score' value. + /// Note this is the @search.score returned as part of the search response. The + /// threshold direction will be chosen for higher @search.score. + /// + internal partial class SearchScoreThreshold : VectorThreshold + { + /// Initializes a new instance of . + /// + /// The threshold will filter based on the '@search.score' value. Note this is the + /// @search.score returned as part of the search response. The threshold direction + /// will be chosen for higher @search.score. + /// + public SearchScoreThreshold(double value) + { + Kind = VectorThresholdKind.SearchScore; + Value = value; + } + + /// Initializes a new instance of . + /// Type of threshold. + /// Keeps track of any properties unknown to the library. + /// + /// The threshold will filter based on the '@search.score' value. Note this is the + /// @search.score returned as part of the search response. The threshold direction + /// will be chosen for higher @search.score. + /// + internal SearchScoreThreshold(VectorThresholdKind kind, IDictionary serializedAdditionalRawData, double value) : base(kind, serializedAdditionalRawData) + { + Value = value; + } + + /// Initializes a new instance of for deserialization. + internal SearchScoreThreshold() + { + } + + /// + /// The threshold will filter based on the '@search.score' value. Note this is the + /// @search.score returned as part of the search response. The threshold direction + /// will be chosen for higher @search.score. + /// + public double Value { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceCounters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceCounters.Serialization.cs new file mode 100644 index 000000000000..600c5d17b6b2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceCounters.Serialization.cs @@ -0,0 +1,217 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + public partial class SearchServiceCounters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchServiceCounters)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("aliasesCount"u8); + writer.WriteObjectValue(AliasCounter, options); + writer.WritePropertyName("documentCount"u8); + writer.WriteObjectValue(DocumentCounter, options); + writer.WritePropertyName("indexesCount"u8); + writer.WriteObjectValue(IndexCounter, options); + writer.WritePropertyName("indexersCount"u8); + writer.WriteObjectValue(IndexerCounter, options); + writer.WritePropertyName("dataSourcesCount"u8); + writer.WriteObjectValue(DataSourceCounter, options); + writer.WritePropertyName("storageSize"u8); + writer.WriteObjectValue(StorageSizeCounter, options); + writer.WritePropertyName("synonymMaps"u8); + writer.WriteObjectValue(SynonymMapCounter, options); + writer.WritePropertyName("skillsetCount"u8); + writer.WriteObjectValue(SkillsetCounter, options); + writer.WritePropertyName("vectorIndexSize"u8); + writer.WriteObjectValue(VectorIndexSizeCounter, options); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchServiceCounters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchServiceCounters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchServiceCounters(document.RootElement, options); + } + + internal static SearchServiceCounters DeserializeSearchServiceCounters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Search.Documents.Indexes.Models.SearchResourceCounter aliasesCount = default; + Search.Documents.Indexes.Models.SearchResourceCounter documentCount = default; + Search.Documents.Indexes.Models.SearchResourceCounter indexesCount = default; + Search.Documents.Indexes.Models.SearchResourceCounter indexersCount = default; + Search.Documents.Indexes.Models.SearchResourceCounter dataSourcesCount = default; + Search.Documents.Indexes.Models.SearchResourceCounter storageSize = default; + Search.Documents.Indexes.Models.SearchResourceCounter synonymMaps = default; + Search.Documents.Indexes.Models.SearchResourceCounter skillsetCount = default; + Search.Documents.Indexes.Models.SearchResourceCounter vectorIndexSize = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("aliasesCount"u8)) + { + aliasesCount = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("documentCount"u8)) + { + documentCount = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("indexesCount"u8)) + { + indexesCount = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("indexersCount"u8)) + { + indexersCount = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("dataSourcesCount"u8)) + { + dataSourcesCount = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("storageSize"u8)) + { + storageSize = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("synonymMaps"u8)) + { + synonymMaps = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("skillsetCount"u8)) + { + skillsetCount = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (property.NameEquals("vectorIndexSize"u8)) + { + vectorIndexSize = Search.Documents.Indexes.Models.SearchResourceCounter.DeserializeSearchResourceCounter(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchServiceCounters( + aliasesCount, + documentCount, 
+ indexesCount, + indexersCount, + dataSourcesCount, + storageSize, + synonymMaps, + skillsetCount, + vectorIndexSize, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchServiceCounters)} does not support writing '{options.Format}' format."); + } + } + + SearchServiceCounters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchServiceCounters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchServiceCounters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchServiceCounters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchServiceCounters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceCounters.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceCounters.cs new file mode 100644 index 000000000000..2c3ec61d7941 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceCounters.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// Represents service-level resource counters and quotas. + public partial class SearchServiceCounters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Total number of aliases. + /// Total number of documents across all indexes in the service. + /// Total number of indexes. + /// Total number of indexers. + /// Total number of data sources. 
+ /// Total size of used storage in bytes. + /// Total number of synonym maps. + /// Total number of skillsets. + /// Total memory consumption of all vector indexes within the service, in bytes. + /// , , , , , , , or is null. + internal SearchServiceCounters(Search.Documents.Indexes.Models.SearchResourceCounter aliasCounter, Search.Documents.Indexes.Models.SearchResourceCounter documentCounter, Search.Documents.Indexes.Models.SearchResourceCounter indexCounter, Search.Documents.Indexes.Models.SearchResourceCounter indexerCounter, Search.Documents.Indexes.Models.SearchResourceCounter dataSourceCounter, Search.Documents.Indexes.Models.SearchResourceCounter storageSizeCounter, Search.Documents.Indexes.Models.SearchResourceCounter synonymMapCounter, Search.Documents.Indexes.Models.SearchResourceCounter skillsetCounter, Search.Documents.Indexes.Models.SearchResourceCounter vectorIndexSizeCounter) + { + Argument.AssertNotNull(aliasCounter, nameof(aliasCounter)); + Argument.AssertNotNull(documentCounter, nameof(documentCounter)); + Argument.AssertNotNull(indexCounter, nameof(indexCounter)); + Argument.AssertNotNull(indexerCounter, nameof(indexerCounter)); + Argument.AssertNotNull(dataSourceCounter, nameof(dataSourceCounter)); + Argument.AssertNotNull(storageSizeCounter, nameof(storageSizeCounter)); + Argument.AssertNotNull(synonymMapCounter, nameof(synonymMapCounter)); + Argument.AssertNotNull(skillsetCounter, nameof(skillsetCounter)); + Argument.AssertNotNull(vectorIndexSizeCounter, nameof(vectorIndexSizeCounter)); + + AliasCounter = aliasCounter; + DocumentCounter = documentCounter; + IndexCounter = indexCounter; + IndexerCounter = indexerCounter; + DataSourceCounter = dataSourceCounter; + StorageSizeCounter = storageSizeCounter; + SynonymMapCounter = synonymMapCounter; + SkillsetCounter = skillsetCounter; + VectorIndexSizeCounter = vectorIndexSizeCounter; + } + + /// Initializes a new instance of . + /// Total number of aliases. + /// Total number of documents across all indexes in the service. + /// Total number of indexes. + /// Total number of indexers. + /// Total number of data sources. + /// Total size of used storage in bytes. + /// Total number of synonym maps. + /// Total number of skillsets. + /// Total memory consumption of all vector indexes within the service, in bytes. + /// Keeps track of any properties unknown to the library. 
+ internal SearchServiceCounters(Search.Documents.Indexes.Models.SearchResourceCounter aliasCounter, Search.Documents.Indexes.Models.SearchResourceCounter documentCounter, Search.Documents.Indexes.Models.SearchResourceCounter indexCounter, Search.Documents.Indexes.Models.SearchResourceCounter indexerCounter, Search.Documents.Indexes.Models.SearchResourceCounter dataSourceCounter, Search.Documents.Indexes.Models.SearchResourceCounter storageSizeCounter, Search.Documents.Indexes.Models.SearchResourceCounter synonymMapCounter, Search.Documents.Indexes.Models.SearchResourceCounter skillsetCounter, Search.Documents.Indexes.Models.SearchResourceCounter vectorIndexSizeCounter, IDictionary serializedAdditionalRawData) + { + AliasCounter = aliasCounter; + DocumentCounter = documentCounter; + IndexCounter = indexCounter; + IndexerCounter = indexerCounter; + DataSourceCounter = dataSourceCounter; + StorageSizeCounter = storageSizeCounter; + SynonymMapCounter = synonymMapCounter; + SkillsetCounter = skillsetCounter; + VectorIndexSizeCounter = vectorIndexSizeCounter; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchServiceCounters() + { + } + + /// Total number of aliases. + public Search.Documents.Indexes.Models.SearchResourceCounter AliasCounter { get; } + /// Total number of documents across all indexes in the service. + public Search.Documents.Indexes.Models.SearchResourceCounter DocumentCounter { get; } + /// Total number of indexes. + public Search.Documents.Indexes.Models.SearchResourceCounter IndexCounter { get; } + /// Total number of indexers. + public Search.Documents.Indexes.Models.SearchResourceCounter IndexerCounter { get; } + /// Total number of data sources. + public Search.Documents.Indexes.Models.SearchResourceCounter DataSourceCounter { get; } + /// Total size of used storage in bytes. + public Search.Documents.Indexes.Models.SearchResourceCounter StorageSizeCounter { get; } + /// Total number of synonym maps. + public Search.Documents.Indexes.Models.SearchResourceCounter SynonymMapCounter { get; } + /// Total number of skillsets. + public Search.Documents.Indexes.Models.SearchResourceCounter SkillsetCounter { get; } + /// Total memory consumption of all vector indexes within the service, in bytes. + public Search.Documents.Indexes.Models.SearchResourceCounter VectorIndexSizeCounter { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceLimits.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceLimits.Serialization.cs new file mode 100644 index 000000000000..78dd7efe41b6 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceLimits.Serialization.cs @@ -0,0 +1,215 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchServiceLimits : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchServiceLimits)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(MaxFieldsPerIndex)) + { + writer.WritePropertyName("maxFieldsPerIndex"u8); + writer.WriteNumberValue(MaxFieldsPerIndex.Value); + } + if (Optional.IsDefined(MaxFieldNestingDepthPerIndex)) + { + writer.WritePropertyName("maxFieldNestingDepthPerIndex"u8); + writer.WriteNumberValue(MaxFieldNestingDepthPerIndex.Value); + } + if (Optional.IsDefined(MaxComplexCollectionFieldsPerIndex)) + { + writer.WritePropertyName("maxComplexCollectionFieldsPerIndex"u8); + writer.WriteNumberValue(MaxComplexCollectionFieldsPerIndex.Value); + } + if (Optional.IsDefined(MaxComplexObjectsInCollectionsPerDocument)) + { + writer.WritePropertyName("maxComplexObjectsInCollectionsPerDocument"u8); + writer.WriteNumberValue(MaxComplexObjectsInCollectionsPerDocument.Value); + } + if (Optional.IsDefined(MaxStoragePerIndexInBytes)) + { + writer.WritePropertyName("maxStoragePerIndex"u8); + writer.WriteNumberValue(MaxStoragePerIndexInBytes.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchServiceLimits IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchServiceLimits)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchServiceLimits(document.RootElement, options); + } + + internal static SearchServiceLimits DeserializeSearchServiceLimits(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxFieldsPerIndex = default; + int? maxFieldNestingDepthPerIndex = default; + int? maxComplexCollectionFieldsPerIndex = default; + int? maxComplexObjectsInCollectionsPerDocument = default; + long? 
maxStoragePerIndex = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxFieldsPerIndex"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxFieldsPerIndex = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxFieldNestingDepthPerIndex"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxFieldNestingDepthPerIndex = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxComplexCollectionFieldsPerIndex"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxComplexCollectionFieldsPerIndex = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxComplexObjectsInCollectionsPerDocument"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxComplexObjectsInCollectionsPerDocument = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maxStoragePerIndex"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxStoragePerIndex = property.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchServiceLimits( + maxFieldsPerIndex, + maxFieldNestingDepthPerIndex, + maxComplexCollectionFieldsPerIndex, + maxComplexObjectsInCollectionsPerDocument, + maxStoragePerIndex, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchServiceLimits)} does not support writing '{options.Format}' format."); + } + } + + SearchServiceLimits IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchServiceLimits(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchServiceLimits)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchServiceLimits FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchServiceLimits(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceLimits.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceLimits.cs new file mode 100644 index 000000000000..03e63805be53 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceLimits.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents various service level limits. + public partial class SearchServiceLimits + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal SearchServiceLimits() + { + } + + /// Initializes a new instance of . + /// The maximum allowed fields per index. + /// + /// The maximum depth which you can nest sub-fields in an index, including the + /// top-level complex field. For example, a/b/c has a nesting depth of 3. + /// + /// + /// The maximum number of fields of type Collection(Edm.ComplexType) allowed in an + /// index. + /// + /// The maximum number of objects in complex collections allowed per document. + /// The maximum amount of storage in bytes allowed per index. + /// Keeps track of any properties unknown to the library. + internal SearchServiceLimits(int? maxFieldsPerIndex, int? maxFieldNestingDepthPerIndex, int? maxComplexCollectionFieldsPerIndex, int? maxComplexObjectsInCollectionsPerDocument, long? maxStoragePerIndexInBytes, IDictionary serializedAdditionalRawData) + { + MaxFieldsPerIndex = maxFieldsPerIndex; + MaxFieldNestingDepthPerIndex = maxFieldNestingDepthPerIndex; + MaxComplexCollectionFieldsPerIndex = maxComplexCollectionFieldsPerIndex; + MaxComplexObjectsInCollectionsPerDocument = maxComplexObjectsInCollectionsPerDocument; + MaxStoragePerIndexInBytes = maxStoragePerIndexInBytes; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The maximum allowed fields per index. + public int? MaxFieldsPerIndex { get; } + /// + /// The maximum depth which you can nest sub-fields in an index, including the + /// top-level complex field. For example, a/b/c has a nesting depth of 3. + /// + public int? MaxFieldNestingDepthPerIndex { get; } + /// + /// The maximum number of fields of type Collection(Edm.ComplexType) allowed in an + /// index. + /// + public int? MaxComplexCollectionFieldsPerIndex { get; } + /// The maximum number of objects in complex collections allowed per document. + public int? MaxComplexObjectsInCollectionsPerDocument { get; } + /// The maximum amount of storage in bytes allowed per index. 
+ public long? MaxStoragePerIndexInBytes { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceRestClient.cs deleted file mode 100644 index 455680d4c694..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceRestClient.cs +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents -{ - internal partial class SearchServiceRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of SearchServiceRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , or is null. - public SearchServiceRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateGetServiceStatisticsRequest() - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/servicestats", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Gets service level statistics for a search service. - /// The cancellation token to use. - public async Task> GetServiceStatisticsAsync(CancellationToken cancellationToken = default) - { - using var message = CreateGetServiceStatisticsRequest(); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchServiceStatistics value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchServiceStatistics.DeserializeSearchServiceStatistics(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Gets service level statistics for a search service. - /// The cancellation token to use. 
- public Response GetServiceStatistics(CancellationToken cancellationToken = default) - { - using var message = CreateGetServiceStatisticsRequest(); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchServiceStatistics value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchServiceStatistics.DeserializeSearchServiceStatistics(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceStatistics.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceStatistics.Serialization.cs new file mode 100644 index 000000000000..cd830b7223af --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceStatistics.Serialization.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchServiceStatistics : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchServiceStatistics)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("counters"u8); + writer.WriteObjectValue(Counters, options); + writer.WritePropertyName("limits"u8); + writer.WriteObjectValue(Limits, options); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchServiceStatistics IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchServiceStatistics)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchServiceStatistics(document.RootElement, options); + } + + internal static SearchServiceStatistics DeserializeSearchServiceStatistics(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SearchServiceCounters counters = default; + SearchServiceLimits limits = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("counters"u8)) + { + counters = SearchServiceCounters.DeserializeSearchServiceCounters(property.Value, options); + continue; + } + if (property.NameEquals("limits"u8)) + { + limits = SearchServiceLimits.DeserializeSearchServiceLimits(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchServiceStatistics(counters, limits, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchServiceStatistics)} does not support writing '{options.Format}' format."); + } + } + + SearchServiceStatistics IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchServiceStatistics(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchServiceStatistics)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SearchServiceStatistics FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchServiceStatistics(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceStatistics.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceStatistics.cs new file mode 100644 index 000000000000..2f4b9301d234 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchServiceStatistics.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Response from a get service statistics request. If successful, it includes + /// service level counters and limits. + /// + public partial class SearchServiceStatistics + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Service level resource counters. + /// Service level general limits. + /// or is null. + internal SearchServiceStatistics(SearchServiceCounters counters, SearchServiceLimits limits) + { + Argument.AssertNotNull(counters, nameof(counters)); + Argument.AssertNotNull(limits, nameof(limits)); + + Counters = counters; + Limits = limits; + } + + /// Initializes a new instance of . + /// Service level resource counters. + /// Service level general limits. + /// Keeps track of any properties unknown to the library. + internal SearchServiceStatistics(SearchServiceCounters counters, SearchServiceLimits limits, IDictionary serializedAdditionalRawData) + { + Counters = counters; + Limits = limits; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchServiceStatistics() + { + } + + /// Service level resource counters. + public SearchServiceCounters Counters { get; } + /// Service level general limits. + public SearchServiceLimits Limits { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggester.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggester.Serialization.cs new file mode 100644 index 000000000000..a721b14c203b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggester.Serialization.cs @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SearchSuggester : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchSuggester)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("searchMode"u8); + writer.WriteStringValue(SearchMode.ToString()); + writer.WritePropertyName("sourceFields"u8); + writer.WriteStartArray(); + foreach (var item in SourceFields) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SearchSuggester IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel<SearchSuggester>)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchSuggester)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchSuggester(document.RootElement, options); + } + + internal static SearchSuggester DeserializeSearchSuggester(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + SearchSuggesterSearchMode searchMode = default; + IList<string> sourceFields = default; + IDictionary<string, BinaryData> serializedAdditionalRawData = default; + Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("searchMode"u8)) + { + searchMode = new SearchSuggesterSearchMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("sourceFields"u8)) + { + List<string> array = new List<string>(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + sourceFields = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SearchSuggester(name, searchMode, sourceFields, serializedAdditionalRawData); + } + + BinaryData IPersistableModel<SearchSuggester>.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel<SearchSuggester>)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchSuggester)} does not support writing '{options.Format}' format."); + } + } + + SearchSuggester IPersistableModel<SearchSuggester>.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel<SearchSuggester>)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchSuggester(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchSuggester)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel<SearchSuggester>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// <summary> Deserializes the model from a raw response. </summary> + /// <param name="response"> The response to deserialize the model from. </param> + internal static SearchSuggester FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchSuggester(document.RootElement); + } + + /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggester.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggester.cs new file mode 100644 index 000000000000..c9462e24fa55 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggester.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// Defines how the Suggest API should apply to a group of fields in the index. + public partial class SearchSuggester + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the suggester. + /// + /// The list of field names to which the suggester applies. Each field must be + /// searchable. + /// + /// or is null. + public SearchSuggester(string name, IEnumerable sourceFields) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(sourceFields, nameof(sourceFields)); + + Name = name; + SourceFields = sourceFields.ToList(); + } + + /// Initializes a new instance of . + /// The name of the suggester. + /// A value indicating the capabilities of the suggester. + /// + /// The list of field names to which the suggester applies. Each field must be + /// searchable. + /// + /// Keeps track of any properties unknown to the library. + internal SearchSuggester(string name, SearchSuggesterSearchMode searchMode, IList sourceFields, IDictionary serializedAdditionalRawData) + { + Name = name; + SearchMode = searchMode; + SourceFields = sourceFields; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SearchSuggester() + { + } + + /// The name of the suggester. + public string Name { get; set; } + /// A value indicating the capabilities of the suggester. + public SearchSuggesterSearchMode SearchMode { get; } = SearchSuggesterSearchMode.AnalyzingInfixMatching; + + /// + /// The list of field names to which the suggester applies. Each field must be + /// searchable. + /// + public IList SourceFields { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggesterSearchMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggesterSearchMode.cs new file mode 100644 index 000000000000..2c1b7b134120 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SearchSuggesterSearchMode.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// <auto-generated/> + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// <summary> The SearchSuggester_searchMode. </summary> + public readonly partial struct SearchSuggesterSearchMode : IEquatable<SearchSuggesterSearchMode> + { + private readonly string _value; + + /// <summary> Initializes a new instance of <see cref="SearchSuggesterSearchMode"/>. </summary> + /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception> + public SearchSuggesterSearchMode(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string AnalyzingInfixMatchingValue = "analyzingInfixMatching"; + + /// <summary> analyzingInfixMatching. </summary> + public static SearchSuggesterSearchMode AnalyzingInfixMatching { get; } = new SearchSuggesterSearchMode(AnalyzingInfixMatchingValue); + /// <summary> Determines if two <see cref="SearchSuggesterSearchMode"/> values are the same. </summary> + public static bool operator ==(SearchSuggesterSearchMode left, SearchSuggesterSearchMode right) => left.Equals(right); + /// <summary> Determines if two <see cref="SearchSuggesterSearchMode"/> values are not the same. </summary> + public static bool operator !=(SearchSuggesterSearchMode left, SearchSuggesterSearchMode right) => !left.Equals(right); + /// <summary> Converts a <see cref="string"/> to a <see cref="SearchSuggesterSearchMode"/>. </summary> + public static implicit operator SearchSuggesterSearchMode(string value) => new SearchSuggesterSearchMode(value); + + /// <inheritdoc /> + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is SearchSuggesterSearchMode other && Equals(other); + /// <inheritdoc /> + public bool Equals(SearchSuggesterSearchMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// <inheritdoc /> + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// <inheritdoc /> + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticConfiguration.Serialization.cs new file mode 100644 index 000000000000..125d9def8c10 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticConfiguration.Serialization.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// <auto-generated/> + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SemanticConfiguration : IUtf8JsonSerializable, IJsonModel<SemanticConfiguration> + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<SemanticConfiguration>)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel<SemanticConfiguration>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// <param name="writer"> The JSON writer. </param> + /// <param name="options"> The client options for reading and writing models. </param> + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ?
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticConfiguration)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("prioritizedFields"u8); + writer.WriteObjectValue(PrioritizedFields, options); + if (Optional.IsDefined(FlightingOptIn)) + { + writer.WritePropertyName("flightingOptIn"u8); + writer.WriteBooleanValue(FlightingOptIn.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SemanticConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticConfiguration)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSemanticConfiguration(document.RootElement, options); + } + + internal static SemanticConfiguration DeserializeSemanticConfiguration(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + SemanticPrioritizedFields prioritizedFields = default; + bool? flightingOptIn = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("prioritizedFields"u8)) + { + prioritizedFields = SemanticPrioritizedFields.DeserializeSemanticPrioritizedFields(property.Value, options); + continue; + } + if (property.NameEquals("flightingOptIn"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + flightingOptIn = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SemanticConfiguration(name, prioritizedFields, flightingOptIn, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SemanticConfiguration)} does not support writing '{options.Format}' format."); + } + } + + SemanticConfiguration IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticConfiguration(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SemanticConfiguration)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SemanticConfiguration FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticConfiguration(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticConfiguration.cs new file mode 100644 index 000000000000..a4cfd8665981 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticConfiguration.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a specific configuration to be used in the context of semantic + /// capabilities. + /// + public partial class SemanticConfiguration + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the semantic configuration. + /// + /// Describes the title, content, and keyword fields to be used for semantic + /// ranking, captions, highlights, and answers. At least one of the three sub + /// properties (titleField, prioritizedKeywordsFields and prioritizedContentFields) + /// need to be set. + /// + /// or is null. + public SemanticConfiguration(string name, SemanticPrioritizedFields prioritizedFields) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(prioritizedFields, nameof(prioritizedFields)); + + Name = name; + PrioritizedFields = prioritizedFields; + } + + /// Initializes a new instance of . + /// The name of the semantic configuration. + /// + /// Describes the title, content, and keyword fields to be used for semantic + /// ranking, captions, highlights, and answers. 
At least one of the three sub + /// properties (titleField, prioritizedKeywordsFields and prioritizedContentFields) + /// need to be set. + /// + /// Determines how which semantic or query rewrite models to use during model flighting/upgrades. + /// Keeps track of any properties unknown to the library. + internal SemanticConfiguration(string name, SemanticPrioritizedFields prioritizedFields, bool? flightingOptIn, IDictionary serializedAdditionalRawData) + { + Name = name; + PrioritizedFields = prioritizedFields; + FlightingOptIn = flightingOptIn; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SemanticConfiguration() + { + } + + /// The name of the semantic configuration. + public string Name { get; set; } + /// + /// Describes the title, content, and keyword fields to be used for semantic + /// ranking, captions, highlights, and answers. At least one of the three sub + /// properties (titleField, prioritizedKeywordsFields and prioritizedContentFields) + /// need to be set. + /// + public SemanticPrioritizedFields PrioritizedFields { get; set; } + /// Determines how which semantic or query rewrite models to use during model flighting/upgrades. + public bool? FlightingOptIn { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticDebugInfo.Serialization.cs new file mode 100644 index 000000000000..b4458d6bda03 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticDebugInfo.Serialization.cs @@ -0,0 +1,214 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SemanticDebugInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticDebugInfo)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(TitleField)) + { + writer.WritePropertyName("titleField"u8); + writer.WriteObjectValue(TitleField, options); + } + if (options.Format != "W" && Optional.IsCollectionDefined(ContentFields)) + { + writer.WritePropertyName("contentFields"u8); + writer.WriteStartArray(); + foreach (var item in ContentFields) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && Optional.IsCollectionDefined(KeywordFields)) + { + writer.WritePropertyName("keywordFields"u8); + writer.WriteStartArray(); + foreach (var item in KeywordFields) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && Optional.IsDefined(RerankerInput)) + { + writer.WritePropertyName("rerankerInput"u8); + writer.WriteObjectValue(RerankerInput, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SemanticDebugInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticDebugInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSemanticDebugInfo(document.RootElement, options); + } + + internal static SemanticDebugInfo DeserializeSemanticDebugInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + QueryResultDocumentSemanticField titleField = default; + IReadOnlyList contentFields = default; + IReadOnlyList keywordFields = default; + QueryResultDocumentRerankerInput rerankerInput = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("titleField"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + titleField = QueryResultDocumentSemanticField.DeserializeQueryResultDocumentSemanticField(property.Value, options); + continue; + } + if (property.NameEquals("contentFields"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(QueryResultDocumentSemanticField.DeserializeQueryResultDocumentSemanticField(item, options)); + } + contentFields = array; + continue; + } + if (property.NameEquals("keywordFields"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + 
array.Add(QueryResultDocumentSemanticField.DeserializeQueryResultDocumentSemanticField(item, options)); + } + keywordFields = array; + continue; + } + if (property.NameEquals("rerankerInput"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rerankerInput = QueryResultDocumentRerankerInput.DeserializeQueryResultDocumentRerankerInput(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SemanticDebugInfo(titleField, contentFields ?? new ChangeTrackingList(), keywordFields ?? new ChangeTrackingList(), rerankerInput, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SemanticDebugInfo)} does not support writing '{options.Format}' format."); + } + } + + SemanticDebugInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticDebugInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SemanticDebugInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SemanticDebugInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticDebugInfo(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticDebugInfo.cs new file mode 100644 index 000000000000..0aaf4c55368a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticDebugInfo.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains debugging information specific to semantic ranking requests. + public partial class SemanticDebugInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal SemanticDebugInfo() + { + ContentFields = new ChangeTrackingList(); + KeywordFields = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// The title field that was sent to the semantic enrichment process, as well as + /// how it was used + /// + /// + /// The content fields that were sent to the semantic enrichment process, as well + /// as how they were used + /// + /// + /// The keyword fields that were sent to the semantic enrichment process, as well + /// as how they were used + /// + /// The raw concatenated strings that were sent to the semantic enrichment process. + /// Keeps track of any properties unknown to the library. + internal SemanticDebugInfo(QueryResultDocumentSemanticField titleField, IReadOnlyList contentFields, IReadOnlyList keywordFields, QueryResultDocumentRerankerInput rerankerInput, IDictionary serializedAdditionalRawData) + { + TitleField = titleField; + ContentFields = contentFields; + KeywordFields = keywordFields; + RerankerInput = rerankerInput; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The title field that was sent to the semantic enrichment process, as well as + /// how it was used + /// + public QueryResultDocumentSemanticField TitleField { get; } + /// + /// The content fields that were sent to the semantic enrichment process, as well + /// as how they were used + /// + public IReadOnlyList ContentFields { get; } + /// + /// The keyword fields that were sent to the semantic enrichment process, as well + /// as how they were used + /// + public IReadOnlyList KeywordFields { get; } + /// The raw concatenated strings that were sent to the semantic enrichment process. + public QueryResultDocumentRerankerInput RerankerInput { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticErrorMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticErrorMode.cs similarity index 77% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticErrorMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SemanticErrorMode.cs index f63b7a256468..9c3c2da95a37 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticErrorMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticErrorMode.cs @@ -8,9 +8,12 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { - /// Allows the user to choose whether a semantic call should fail completely, or to return partial results. + /// + /// Allows the user to choose whether a semantic call should fail completely, or to + /// return partial results. + /// public readonly partial struct SemanticErrorMode : IEquatable { private readonly string _value; @@ -25,9 +28,16 @@ public SemanticErrorMode(string value) private const string PartialValue = "partial"; private const string FailValue = "fail"; - /// If the semantic processing fails, partial results still return. The definition of partial results depends on what semantic step failed and what was the reason for failure. + /// + /// If the semantic processing fails, partial results still return. 
The definition + /// of partial results depends on what semantic step failed and what was the reason + /// for failure. + /// public static SemanticErrorMode Partial { get; } = new SemanticErrorMode(PartialValue); - /// If there is an exception during the semantic processing step, the query will fail and return the appropriate HTTP code depending on the error. + /// + /// If there is an exception during the semantic processing step, the query will + /// fail and return the appropriate HTTP code depending on the error. + /// public static SemanticErrorMode Fail { get; } = new SemanticErrorMode(FailValue); /// Determines if two values are the same. public static bool operator ==(SemanticErrorMode left, SemanticErrorMode right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticErrorReason.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticErrorReason.cs similarity index 92% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticErrorReason.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SemanticErrorReason.cs index a2514c208a9b..bb52a5e1f446 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticErrorReason.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticErrorReason.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// Reason that a partial response was returned for a semantic ranking request. public readonly partial struct SemanticErrorReason : IEquatable @@ -26,7 +26,10 @@ public SemanticErrorReason(string value) private const string CapacityOverloadedValue = "capacityOverloaded"; private const string TransientValue = "transient"; - /// If `semanticMaxWaitInMilliseconds` was set and the semantic processing duration exceeded that value. Only the base results were returned. + /// + /// If `semanticMaxWaitInMilliseconds` was set and the semantic processing duration + /// exceeded that value. Only the base results were returned. + /// public static SemanticErrorReason MaxWaitExceeded { get; } = new SemanticErrorReason(MaxWaitExceededValue); /// The request was throttled. Only the base results were returned. public static SemanticErrorReason CapacityOverloaded { get; } = new SemanticErrorReason(CapacityOverloadedValue); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticField.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticField.Serialization.cs new file mode 100644 index 000000000000..f63c45393620 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticField.Serialization.cs @@ -0,0 +1,142 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SemanticField : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticField)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("fieldName"u8); + writer.WriteStringValue(FieldName); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SemanticField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticField)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSemanticField(document.RootElement, options); + } + + internal static SemanticField DeserializeSemanticField(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string fieldName = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("fieldName"u8)) + { + fieldName = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SemanticField(fieldName, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SemanticField)} does not support writing '{options.Format}' format."); + } + } + + SemanticField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SemanticField)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static SemanticField FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticField(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticField.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticField.cs new file mode 100644 index 000000000000..44b07c3c7ff8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticField.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A field that is used as part of the semantic configuration. + public partial class SemanticField + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// File name. + /// is null. + public SemanticField(string fieldName) + { + Argument.AssertNotNull(fieldName, nameof(fieldName)); + + FieldName = fieldName; + } + + /// Initializes a new instance of . + /// File name. + /// Keeps track of any properties unknown to the library. + internal SemanticField(string fieldName, IDictionary serializedAdditionalRawData) + { + FieldName = fieldName; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SemanticField() + { + } + + /// File name. + public string FieldName { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticFieldState.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticFieldState.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticFieldState.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SemanticFieldState.cs index 1a75c5f2e4d6..d14baf7c4e82 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticFieldState.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticFieldState.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// The way the field was used for the semantic enrichment process. 
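The SemanticField model just added, like the other models in this change, round-trips through System.ClientModel's ModelReaderWriter using the "J" (JSON) format implemented by its IJsonModel/IPersistableModel members; a rough sketch, assuming the namespace shown in this generated code and a hypothetical field name:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

// Hypothetical field name.
var field = new SemanticField("hotelName");

// Writes the "J" representation, e.g. {"fieldName":"hotelName"}.
BinaryData json = ModelReaderWriter.Write(field);

// Reads it back via IJsonModel<SemanticField>.Create.
SemanticField roundTripped = ModelReaderWriter.Read<SemanticField>(json);
Console.WriteLine(roundTripped.FieldName);   // hotelName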
public readonly partial struct SemanticFieldState : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticPrioritizedFields.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticPrioritizedFields.Serialization.cs new file mode 100644 index 000000000000..5ad73faf42c7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticPrioritizedFields.Serialization.cs @@ -0,0 +1,199 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SemanticPrioritizedFields : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticPrioritizedFields)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(TitleField)) + { + writer.WritePropertyName("titleField"u8); + writer.WriteObjectValue(TitleField, options); + } + if (Optional.IsCollectionDefined(ContentFields)) + { + writer.WritePropertyName("prioritizedContentFields"u8); + writer.WriteStartArray(); + foreach (var item in ContentFields) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(KeywordsFields)) + { + writer.WritePropertyName("prioritizedKeywordsFields"u8); + writer.WriteStartArray(); + foreach (var item in KeywordsFields) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SemanticPrioritizedFields IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticPrioritizedFields)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSemanticPrioritizedFields(document.RootElement, options); + } + + internal static SemanticPrioritizedFields DeserializeSemanticPrioritizedFields(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SemanticField titleField = default; + IList prioritizedContentFields = default; + IList prioritizedKeywordsFields = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("titleField"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + titleField = SemanticField.DeserializeSemanticField(property.Value, options); + continue; + } + if (property.NameEquals("prioritizedContentFields"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SemanticField.DeserializeSemanticField(item, options)); + } + prioritizedContentFields = array; + continue; + } + if (property.NameEquals("prioritizedKeywordsFields"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SemanticField.DeserializeSemanticField(item, options)); + } + prioritizedKeywordsFields = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SemanticPrioritizedFields(titleField, prioritizedContentFields ?? new ChangeTrackingList(), prioritizedKeywordsFields ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SemanticPrioritizedFields)} does not support writing '{options.Format}' format."); + } + } + + SemanticPrioritizedFields IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticPrioritizedFields(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SemanticPrioritizedFields)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static SemanticPrioritizedFields FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticPrioritizedFields(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticPrioritizedFields.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticPrioritizedFields.cs new file mode 100644 index 000000000000..bad67421e16b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticPrioritizedFields.cs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Describes the title, content, and keywords fields to be used for semantic + /// ranking, captions, highlights, and answers. + /// + public partial class SemanticPrioritizedFields + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public SemanticPrioritizedFields() + { + ContentFields = new ChangeTrackingList(); + KeywordsFields = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// Defines the title field to be used for semantic ranking, captions, highlights, + /// and answers. If you don't have a title field in your index, leave this blank. + /// + /// + /// Defines the content fields to be used for semantic ranking, captions, + /// highlights, and answers. For the best result, the selected fields should + /// contain text in natural language form. The order of the fields in the array + /// represents their priority. Fields with lower priority may get truncated if the + /// content is long. + /// + /// + /// Defines the keyword fields to be used for semantic ranking, captions, + /// highlights, and answers. For the best result, the selected fields should + /// contain a list of keywords. The order of the fields in the array represents + /// their priority. Fields with lower priority may get truncated if the content is + /// long. + /// + /// Keeps track of any properties unknown to the library. 
+ internal SemanticPrioritizedFields(SemanticField titleField, IList contentFields, IList keywordsFields, IDictionary serializedAdditionalRawData) + { + TitleField = titleField; + ContentFields = contentFields; + KeywordsFields = keywordsFields; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// Defines the title field to be used for semantic ranking, captions, highlights, + /// and answers. If you don't have a title field in your index, leave this blank. + /// + public SemanticField TitleField { get; set; } + /// + /// Defines the content fields to be used for semantic ranking, captions, + /// highlights, and answers. For the best result, the selected fields should + /// contain text in natural language form. The order of the fields in the array + /// represents their priority. Fields with lower priority may get truncated if the + /// content is long. + /// + public IList ContentFields { get; } + /// + /// Defines the keyword fields to be used for semantic ranking, captions, + /// highlights, and answers. For the best result, the selected fields should + /// contain a list of keywords. The order of the fields in the array represents + /// their priority. Fields with lower priority may get truncated if the content is + /// long. + /// + public IList KeywordsFields { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticQueryRewritesResultType.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticQueryRewritesResultType.cs similarity index 91% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticQueryRewritesResultType.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SemanticQueryRewritesResultType.cs index c62707a49a1e..b6f121be53aa 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticQueryRewritesResultType.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticQueryRewritesResultType.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// Type of query rewrite that was used for this request. public readonly partial struct SemanticQueryRewritesResultType : IEquatable @@ -24,7 +24,10 @@ public SemanticQueryRewritesResultType(string value) private const string OriginalQueryOnlyValue = "originalQueryOnly"; - /// Query rewrites were not successfully generated for this request. Only the original query was used to retrieve the results. + /// + /// Query rewrites were not successfully generated for this request. Only the + /// original query was used to retrieve the results. + /// public static SemanticQueryRewritesResultType OriginalQueryOnly { get; } = new SemanticQueryRewritesResultType(OriginalQueryOnlyValue); /// Determines if two values are the same. public static bool operator ==(SemanticQueryRewritesResultType left, SemanticQueryRewritesResultType right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearch.Serialization.cs new file mode 100644 index 000000000000..875dab47a6fc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearch.Serialization.cs @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
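Putting the semantic models above together, a hedged construction sketch (the field and configuration names are hypothetical); the JSON in the trailing comment follows the property names used by the serializers in this diff:

using Azure.Search.Documents;

var prioritizedFields = new SemanticPrioritizedFields
{
    // Leave TitleField unset if the index has no title-like field.
    TitleField = new SemanticField("hotelName"),
};
prioritizedFields.ContentFields.Add(new SemanticField("description"));
prioritizedFields.KeywordsFields.Add(new SemanticField("tags"));

// At least one of titleField, prioritizedContentFields, or prioritizedKeywordsFields must be set.
var configuration = new SemanticConfiguration("my-semantic-config", prioritizedFields);

// Serializes to roughly:
// {
//   "name": "my-semantic-config",
//   "prioritizedFields": {
//     "titleField": { "fieldName": "hotelName" },
//     "prioritizedContentFields": [ { "fieldName": "description" } ],
//     "prioritizedKeywordsFields": [ { "fieldName": "tags" } ]
//   }
// }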
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SemanticSearch : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticSearch)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(DefaultConfigurationName)) + { + writer.WritePropertyName("defaultConfiguration"u8); + writer.WriteStringValue(DefaultConfigurationName); + } + if (Optional.IsCollectionDefined(Configurations)) + { + writer.WritePropertyName("configurations"u8); + writer.WriteStartArray(); + foreach (var item in Configurations) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SemanticSearch IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SemanticSearch)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSemanticSearch(document.RootElement, options); + } + + internal static SemanticSearch DeserializeSemanticSearch(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string defaultConfiguration = default; + IList configurations = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultConfiguration"u8)) + { + defaultConfiguration = property.Value.GetString(); + continue; + } + if (property.NameEquals("configurations"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SemanticConfiguration.DeserializeSemanticConfiguration(item, options)); + } + configurations = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SemanticSearch(defaultConfiguration, configurations ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SemanticSearch)} does not support writing '{options.Format}' format."); + } + } + + SemanticSearch IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticSearch(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SemanticSearch)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SemanticSearch FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSemanticSearch(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearch.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearch.cs new file mode 100644 index 000000000000..6fe05f23d1af --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearch.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Defines parameters for a search index that influence semantic capabilities. + public partial class SemanticSearch + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public SemanticSearch() + { + Configurations = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// + /// Allows you to set the name of a default semantic configuration in your index, + /// making it optional to pass it on as a query parameter every time. + /// + /// The semantic configurations for the index. + /// Keeps track of any properties unknown to the library. + internal SemanticSearch(string defaultConfigurationName, IList configurations, IDictionary serializedAdditionalRawData) + { + DefaultConfigurationName = defaultConfigurationName; + Configurations = configurations; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// Allows you to set the name of a default semantic configuration in your index, + /// making it optional to pass it on as a query parameter every time. + /// + public string DefaultConfigurationName { get; set; } + /// The semantic configurations for the index. + public IList Configurations { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearchResultsType.cs b/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearchResultsType.cs similarity index 90% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearchResultsType.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SemanticSearchResultsType.cs index a6e9054231ef..45495c7254e7 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SemanticSearchResultsType.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SemanticSearchResultsType.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// Type of partial response that was returned for a semantic ranking request. 
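A short sketch of wiring that configuration into the index-level SemanticSearch settings defined above; the configuration name matches the hypothetical one from the previous sketch:

using Azure.Search.Documents;

var semanticSearch = new SemanticSearch
{
    // Optional: queries may then omit the semanticConfiguration parameter.
    DefaultConfigurationName = "my-semantic-config",
};
semanticSearch.Configurations.Add(
    new SemanticConfiguration(
        "my-semantic-config",
        new SemanticPrioritizedFields { TitleField = new SemanticField("hotelName") }));
// Serializes under "defaultConfiguration" and "configurations", as written by SemanticSearch.Serialization above.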
public readonly partial struct SemanticSearchResultsType : IEquatable @@ -27,7 +27,11 @@ public SemanticSearchResultsType(string value) /// Results without any semantic enrichment or reranking. public static SemanticSearchResultsType BaseResults { get; } = new SemanticSearchResultsType(BaseResultsValue); - /// Results have been reranked with the reranker model and will include semantic captions. They will not include any answers, answers highlights or caption highlights. + /// + /// Results have been reranked with the reranker model and will include semantic + /// captions. They will not include any answers, answers highlights or caption + /// highlights. + /// public static SemanticSearchResultsType RerankedResults { get; } = new SemanticSearchResultsType(RerankedResultsValue); /// Determines if two values are the same. public static bool operator ==(SemanticSearchResultsType left, SemanticSearchResultsType right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkill.Serialization.cs new file mode 100644 index 000000000000..a253fefd26ce --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkill.Serialization.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SentimentSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SentimentSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); + } + } + + SentimentSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SentimentSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSentimentSkill(document.RootElement, options); + } + + internal static SentimentSkill DeserializeSentimentSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SentimentSkillLanguage? 
defaultLanguageCode = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultLanguageCode = new SentimentSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SentimentSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SentimentSkill)} does not support writing '{options.Format}' format."); + } + } + + SentimentSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSentimentSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SentimentSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SentimentSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSentimentSkill(document.RootElement); + } + + /// Convert into a . 
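For the deprecated SentimentSkill handled above, a rough construction sketch against the SentimentSkill model that follows next in this diff; InputFieldMappingEntry and OutputFieldMappingEntry are assumed to expose the single-name constructor and Source/TargetName setters they have in the public Azure.Search.Documents surface, and all names and paths are hypothetical:

using Azure.Search.Documents;

// Deprecated: prefer the V3 sentiment skill for new skillsets.
var sentimentSkill = new SentimentSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("score") { TargetName = "sentimentScore" } })
{
    DefaultLanguageCode = SentimentSkillLanguage.En,   // default is "en"
};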
+        internal override RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkill.cs
new file mode 100644
index 000000000000..02d6bb966322
--- /dev/null
+++ b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkill.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.Search.Documents
+{
+    /// <summary> This skill is deprecated. Use the V3.SentimentSkill instead. </summary>
+    public partial class SentimentSkill : SearchIndexerSkill
+    {
+        /// <summary> Initializes a new instance of <see cref="SentimentSkill"/>. </summary>
+        /// <param name="inputs">
+        /// Inputs of the skills could be a column in the source data set, or the output of
+        /// an upstream skill.
+        /// </param>
+        /// <param name="outputs">
+        /// The output of a skill is either a field in a search index, or a value that can
+        /// be consumed as an input by another skill.
+        /// </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="inputs"/> or <paramref name="outputs"/> is null. </exception>
+        public SentimentSkill(IEnumerable<InputFieldMappingEntry> inputs, IEnumerable<OutputFieldMappingEntry> outputs) : base(inputs, outputs)
+        {
+            Argument.AssertNotNull(inputs, nameof(inputs));
+            Argument.AssertNotNull(outputs, nameof(outputs));
+
+            OdataType = "#Microsoft.Skills.Text.SentimentSkill";
+        }
+
+        /// <summary> Initializes a new instance of <see cref="SentimentSkill"/>. </summary>
+        /// <param name="odataType"> The discriminator for derived types. </param>
+        /// <param name="name">
+        /// The name of the skill which uniquely identifies it within the skillset. A skill
+        /// with no name defined will be given a default name of its 1-based index in the
+        /// skills array, prefixed with the character '#'.
+        /// </param>
+        /// <param name="description">
+        /// The description of the skill which describes the inputs, outputs, and usage of
+        /// the skill.
+        /// </param>
+        /// <param name="context">
+        /// Represents the level at which operations take place, such as the document root
+        /// or document content (for example, /document or /document/content). The default
+        /// is /document.
+        /// </param>
+        /// <param name="inputs">
+        /// Inputs of the skills could be a column in the source data set, or the output of
+        /// an upstream skill.
+        /// </param>
+        /// <param name="outputs">
+        /// The output of a skill is either a field in a search index, or a value that can
+        /// be consumed as an input by another skill.
+        /// </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        /// <param name="defaultLanguageCode"> A value indicating which language code to use. Default is `en`. </param>
+        internal SentimentSkill(string odataType, string name, string description, string context, IList<InputFieldMappingEntry> inputs, IList<OutputFieldMappingEntry> outputs, IDictionary<string, BinaryData> serializedAdditionalRawData, SentimentSkillLanguage? defaultLanguageCode) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData)
+        {
+            DefaultLanguageCode = defaultLanguageCode;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="SentimentSkill"/> for deserialization. </summary>
+        internal SentimentSkill()
+        {
+        }
+
+        /// <summary> A value indicating which language code to use. Default is `en`. </summary>
+        public SentimentSkillLanguage?
DefaultLanguageCode { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillLanguage.cs similarity index 99% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillLanguage.cs index 0006ae5e09a3..93691694a7a1 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// Deprecated. The language codes supported for input text by SentimentSkill. public readonly partial struct SentimentSkillLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillV3.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillV3.Serialization.cs new file mode 100644 index 000000000000..7e743c98cc63 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillV3.Serialization.cs @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Indexes.Models +{ + internal partial class SentimentSkillV3 : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SentimentSkillV3)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode); + } + if (Optional.IsDefined(IncludeOpinionMining)) + { + writer.WritePropertyName("includeOpinionMining"u8); + writer.WriteBooleanValue(IncludeOpinionMining.Value); + } + if (Optional.IsDefined(ModelVersion)) + { + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + } + } + + Search.Documents.Indexes.Models.SentimentSkillV3 IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SentimentSkillV3)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return Search.Documents.Indexes.Models.SentimentSkillV3.DeserializeSentimentSkillV3(document.RootElement, options); + } + + internal static Search.Documents.Indexes.Models.SentimentSkillV3 DeserializeSentimentSkillV3(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string defaultLanguageCode = default; + bool? includeOpinionMining = default; + string modelVersion = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + defaultLanguageCode = property.Value.GetString(); + continue; + } + if (property.NameEquals("includeOpinionMining"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + includeOpinionMining = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Search.Documents.InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new Search.Documents.Indexes.Models.SentimentSkillV3( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + includeOpinionMining, + modelVersion); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SentimentSkillV3)} does not support writing '{options.Format}' format."); + } + } + + Search.Documents.Indexes.Models.SentimentSkillV3 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SentimentSkillV3.DeserializeSentimentSkillV3(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(Search.Documents.Indexes.Models.SentimentSkillV3)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new Search.Documents.Indexes.Models.SentimentSkillV3 FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return Search.Documents.Indexes.Models.SentimentSkillV3.DeserializeSentimentSkillV3(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillV3.cs b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillV3.cs new file mode 100644 index 000000000000..7dc88992966d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SentimentSkillV3.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Indexes.Models +{ + /// + /// Using the Text Analytics API, evaluates unstructured text and for each record, + /// provides sentiment labels (such as "negative", "neutral" and "positive") based + /// on the highest confidence score found by the service at a sentence and + /// document-level. + /// + internal partial class SentimentSkillV3 : Search.Documents.SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public SentimentSkillV3(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.V3.SentimentSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. 
A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. Default is `en`. + /// + /// If set to true, the skill output will include information from Text Analytics + /// for opinion mining, namely targets (nouns or verbs) and their associated + /// assessment (adjective) in the text. Default is false. + /// + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + internal SentimentSkillV3(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string defaultLanguageCode, bool? includeOpinionMining, string modelVersion) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + IncludeOpinionMining = includeOpinionMining; + ModelVersion = modelVersion; + } + + /// Initializes a new instance of for deserialization. + internal SentimentSkillV3() + { + } + + /// A value indicating which language code to use. Default is `en`. + public string DefaultLanguageCode { get; set; } + /// + /// If set to true, the skill output will include information from Text Analytics + /// for opinion mining, namely targets (nouns or verbs) and their associated + /// assessment (adjective) in the text. Default is false. + /// + public bool? IncludeOpinionMining { get; set; } + /// + /// The version of the model to use when calling the Text Analytics service. It + /// will default to the latest available when not specified. We recommend you do + /// not specify this value unless absolutely necessary. + /// + public string ModelVersion { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ShaperSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ShaperSkill.Serialization.cs new file mode 100644 index 000000000000..c8c75d71f80d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ShaperSkill.Serialization.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ShaperSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShaperSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + ShaperSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShaperSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeShaperSkill(document.RootElement, options); + } + + internal static ShaperSkill DeserializeShaperSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ShaperSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ShaperSkill)} does not support writing '{options.Format}' format."); + } + } + + ShaperSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeShaperSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ShaperSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ShaperSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeShaperSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ShaperSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/ShaperSkill.cs new file mode 100644 index 000000000000..3b333fa06763 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ShaperSkill.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A skill for reshaping the outputs. It creates a complex type to support + /// composite fields (also known as multipart fields). + /// + public partial class ShaperSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public ShaperSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Util.ShaperSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. 
+ /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + internal ShaperSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal ShaperSkill() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ShingleTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/ShingleTokenFilter.Serialization.cs new file mode 100644 index 000000000000..aa0b9275e31b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ShingleTokenFilter.Serialization.cs @@ -0,0 +1,223 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class ShingleTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShingleTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxShingleSize)) + { + writer.WritePropertyName("maxShingleSize"u8); + writer.WriteNumberValue(MaxShingleSize.Value); + } + if (Optional.IsDefined(MinShingleSize)) + { + writer.WritePropertyName("minShingleSize"u8); + writer.WriteNumberValue(MinShingleSize.Value); + } + if (Optional.IsDefined(OutputUnigrams)) + { + writer.WritePropertyName("outputUnigrams"u8); + writer.WriteBooleanValue(OutputUnigrams.Value); + } + if (Optional.IsDefined(OutputUnigramsIfNoShingles)) + { + writer.WritePropertyName("outputUnigramsIfNoShingles"u8); + writer.WriteBooleanValue(OutputUnigramsIfNoShingles.Value); + } + if (Optional.IsDefined(TokenSeparator)) + { + writer.WritePropertyName("tokenSeparator"u8); + writer.WriteStringValue(TokenSeparator); + } + if (Optional.IsDefined(FilterToken)) + { + writer.WritePropertyName("filterToken"u8); + writer.WriteStringValue(FilterToken); + } + } + + ShingleTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShingleTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeShingleTokenFilter(document.RootElement, options); + } + + internal static ShingleTokenFilter DeserializeShingleTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? maxShingleSize = default; + int? minShingleSize = default; + bool? outputUnigrams = default; + bool? outputUnigramsIfNoShingles = default; + string tokenSeparator = default; + string filterToken = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxShingleSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxShingleSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("minShingleSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minShingleSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("outputUnigrams"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + outputUnigrams = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("outputUnigramsIfNoShingles"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + outputUnigramsIfNoShingles = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("tokenSeparator"u8)) + { + tokenSeparator = property.Value.GetString(); + continue; + } + if (property.NameEquals("filterToken"u8)) + { + filterToken = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ShingleTokenFilter( + odataType, + name, + serializedAdditionalRawData, + maxShingleSize, + minShingleSize, + outputUnigrams, + outputUnigramsIfNoShingles, + tokenSeparator, + filterToken); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ShingleTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + ShingleTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeShingleTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ShingleTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new ShingleTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeShingleTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/ShingleTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/ShingleTokenFilter.cs new file mode 100644 index 000000000000..d3a020df1d3f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/ShingleTokenFilter.cs @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Creates combinations of tokens as a single token. This token filter is + /// implemented using Apache Lucene. + /// + public partial class ShingleTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public ShingleTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.ShingleTokenFilter"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The maximum shingle size. Default and minimum value is 2. + /// + /// The minimum shingle size. Default and minimum value is 2. Must be less than the + /// value of maxShingleSize. + /// + /// + /// A value indicating whether the output stream will contain the input tokens + /// (unigrams) as well as shingles. Default is true. + /// + /// + /// A value indicating whether to output unigrams for those times when no shingles + /// are available. This property takes precedence when outputUnigrams is set to + /// false. Default is false. + /// + /// + /// The string to use when joining adjacent tokens to form a shingle. Default is a + /// single space (" "). + /// + /// + /// The string to insert for each position at which there is no token. Default is + /// an underscore ("_"). 
+ /// + internal ShingleTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxShingleSize, int? minShingleSize, bool? outputUnigrams, bool? outputUnigramsIfNoShingles, string tokenSeparator, string filterToken) : base(odataType, name, serializedAdditionalRawData) + { + MaxShingleSize = maxShingleSize; + MinShingleSize = minShingleSize; + OutputUnigrams = outputUnigrams; + OutputUnigramsIfNoShingles = outputUnigramsIfNoShingles; + TokenSeparator = tokenSeparator; + FilterToken = filterToken; + } + + /// Initializes a new instance of for deserialization. + internal ShingleTokenFilter() + { + } + + /// The maximum shingle size. Default and minimum value is 2. + public int? MaxShingleSize { get; set; } + /// + /// The minimum shingle size. Default and minimum value is 2. Must be less than the + /// value of maxShingleSize. + /// + public int? MinShingleSize { get; set; } + /// + /// A value indicating whether the output stream will contain the input tokens + /// (unigrams) as well as shingles. Default is true. + /// + public bool? OutputUnigrams { get; set; } + /// + /// A value indicating whether to output unigrams for those times when no shingles + /// are available. This property takes precedence when outputUnigrams is set to + /// false. Default is false. + /// + public bool? OutputUnigramsIfNoShingles { get; set; } + /// + /// The string to use when joining adjacent tokens to form a shingle. Default is a + /// single space (" "). + /// + public string TokenSeparator { get; set; } + /// + /// The string to insert for each position at which there is no token. Default is + /// an underscore ("_"). + /// + public string FilterToken { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SimilarityAlgorithm.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SimilarityAlgorithm.Serialization.cs new file mode 100644 index 000000000000..b3e2d99c85a0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SimilarityAlgorithm.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownSimilarityAlgorithm))] + public partial class SimilarityAlgorithm : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SimilarityAlgorithm IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSimilarityAlgorithm(document.RootElement, options); + } + + internal static SimilarityAlgorithm DeserializeSimilarityAlgorithm(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.BM25Similarity": return BM25SimilarityAlgorithm.DeserializeBM25SimilarityAlgorithm(element, options); + case "#Microsoft.Azure.Search.ClassicSimilarity": return ClassicSimilarityAlgorithm.DeserializeClassicSimilarityAlgorithm(element, options); + } + } + return UnknownSimilarityAlgorithm.DeserializeUnknownSimilarityAlgorithm(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support writing '{options.Format}' format."); + } + } + + SimilarityAlgorithm IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSimilarityAlgorithm(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SimilarityAlgorithm FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSimilarityAlgorithm(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SimilarityAlgorithm.cs b/sdk/search/Azure.Search.Documents/src/Generated/SimilarityAlgorithm.cs new file mode 100644 index 000000000000..2ee8a9a70029 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SimilarityAlgorithm.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Base type for similarity algorithms. Similarity algorithms are used to + /// calculate scores that tie queries to documents. The higher the score, the more + /// relevant the document is to that specific query. Those scores are used to rank + /// the search results. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public abstract partial class SimilarityAlgorithm + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + protected SimilarityAlgorithm() + { + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal SimilarityAlgorithm(string odataType, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SingleVectorFieldResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SingleVectorFieldResult.Serialization.cs new file mode 100644 index 000000000000..7e55ab2fc971 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SingleVectorFieldResult.Serialization.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SingleVectorFieldResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SingleVectorFieldResult)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(SearchScore)) + { + writer.WritePropertyName("searchScore"u8); + writer.WriteNumberValue(SearchScore.Value); + } + if (options.Format != "W" && Optional.IsDefined(VectorSimilarity)) + { + writer.WritePropertyName("vectorSimilarity"u8); + writer.WriteNumberValue(VectorSimilarity.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SingleVectorFieldResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SingleVectorFieldResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSingleVectorFieldResult(document.RootElement, options); + } + + internal static SingleVectorFieldResult DeserializeSingleVectorFieldResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double? searchScore = default; + double? 
vectorSimilarity = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("searchScore"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchScore = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("vectorSimilarity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + vectorSimilarity = property.Value.GetDouble(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SingleVectorFieldResult(searchScore, vectorSimilarity, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SingleVectorFieldResult)} does not support writing '{options.Format}' format."); + } + } + + SingleVectorFieldResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSingleVectorFieldResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SingleVectorFieldResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SingleVectorFieldResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSingleVectorFieldResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SingleVectorFieldResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/SingleVectorFieldResult.cs new file mode 100644 index 000000000000..fb688525b15b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SingleVectorFieldResult.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A single vector field result. Both @search.score and vector similarity values + /// are returned. Vector similarity is related to @search.score by an equation. + /// + public partial class SingleVectorFieldResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . 
+ /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal SingleVectorFieldResult() + { + } + + /// Initializes a new instance of . + /// + /// The @search.score value that is calculated from the vector similarity score. + /// This is the score that's visible in a pure single-field single-vector query. + /// + /// + /// The vector similarity score for this document. Note this is the canonical + /// definition of similarity metric, not the 'distance' version. For example, + /// cosine similarity instead of cosine distance. + /// + /// Keeps track of any properties unknown to the library. + internal SingleVectorFieldResult(double? searchScore, double? vectorSimilarity, IDictionary serializedAdditionalRawData) + { + SearchScore = searchScore; + VectorSimilarity = vectorSimilarity; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The @search.score value that is calculated from the vector similarity score. + /// This is the score that's visible in a pure single-field single-vector query. + /// + public double? SearchScore { get; } + /// + /// The vector similarity score for this document. Note this is the canonical + /// definition of similarity metric, not the 'distance' version. For example, + /// cosine similarity instead of cosine distance. + /// + public double? VectorSimilarity { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Skillsets.cs b/sdk/search/Azure.Search.Documents/src/Generated/Skillsets.cs new file mode 100644 index 000000000000..0c5b926d5c9c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/Skillsets.cs @@ -0,0 +1,872 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; +using Azure.Search.Documents.Models; + +namespace Azure.Search.Documents +{ + // Data plane generated sub-client. + /// The Skillsets sub-client. + public partial class Skillsets + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of Skillsets for mocking. + protected Skillsets() + { + } + + /// Initializes a new instance of Skillsets. + /// The handler for diagnostic messaging in the client. 
+        /// <param name="pipeline"> The HTTP pipeline for sending and receiving REST requests and responses. </param>
+        /// <param name="keyCredential"> The key credential to copy. </param>
+        /// <param name="tokenCredential"> The token credential to copy. </param>
+        /// <param name="endpoint"> Service host. </param>
+        /// <param name="apiVersion"> The API version to use for this operation. </param>
+        internal Skillsets(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string apiVersion)
+        {
+            ClientDiagnostics = clientDiagnostics;
+            _pipeline = pipeline;
+            _keyCredential = keyCredential;
+            _tokenCredential = tokenCredential;
+            _endpoint = endpoint;
+            _apiVersion = apiVersion;
+        }
+
+        /// <summary>
+        /// Creates a new skillset in a search service or updates the skillset if it
+        /// already exists.
+        /// </summary>
+        /// <param name="skillsetName"> The name of the skillset. </param>
+        /// <param name="skillset">
+        /// The skillset containing one or more skills to create or update in a search
+        /// service.
+        /// </param>
+        /// <param name="skipIndexerResetRequirementForCache"> Ignores cache reset requirements. </param>
+        /// <param name="disableCacheReprocessingChangeDetection"> Disables cache reprocessing change detection. </param>
+        /// <param name="matchConditions"> The content to send as the request conditions of the request. </param>
+        /// <param name="cancellationToken"> The cancellation token to use. </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="skillsetName"/> or <paramref name="skillset"/> is null. </exception>
+        /// <exception cref="ArgumentException"> <paramref name="skillsetName"/> is an empty string, and was expected to be non-empty. </exception>
+        ///
+        public virtual async Task<Response<SearchIndexerSkillset>> CreateOrUpdateAsync(string skillsetName, SearchIndexerSkillset skillset, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default)
+        {
+            Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName));
+            Argument.AssertNotNull(skillset, nameof(skillset));
+
+            using RequestContent content = skillset.ToRequestContent();
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = await CreateOrUpdateAsync(skillsetName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context).ConfigureAwait(false);
+            return Response.FromValue(SearchIndexerSkillset.FromResponse(response), response);
+        }
+
+        /// <summary>
+        /// Creates a new skillset in a search service or updates the skillset if it
+        /// already exists.
+        /// </summary>
+        /// <param name="skillsetName"> The name of the skillset. </param>
+        /// <param name="skillset">
+        /// The skillset containing one or more skills to create or update in a search
+        /// service.
+        /// </param>
+        /// <param name="skipIndexerResetRequirementForCache"> Ignores cache reset requirements. </param>
+        /// <param name="disableCacheReprocessingChangeDetection"> Disables cache reprocessing change detection. </param>
+        /// <param name="matchConditions"> The content to send as the request conditions of the request. </param>
+        /// <param name="cancellationToken"> The cancellation token to use. </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="skillsetName"/> or <paramref name="skillset"/> is null. </exception>
+        /// <exception cref="ArgumentException"> <paramref name="skillsetName"/> is an empty string, and was expected to be non-empty. </exception>
+        ///
+        public virtual Response<SearchIndexerSkillset> CreateOrUpdate(string skillsetName, SearchIndexerSkillset skillset, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, CancellationToken cancellationToken = default)
+        {
+            Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName));
+            Argument.AssertNotNull(skillset, nameof(skillset));
+
+            using RequestContent content = skillset.ToRequestContent();
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = CreateOrUpdate(skillsetName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context);
+            return Response.FromValue(SearchIndexerSkillset.FromResponse(response), response);
+        }
+
+        /// <summary>
+        /// [Protocol Method] Creates a new skillset in a search service or updates the skillset if it
+        /// already exists.
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the skillset. + /// The content to send as the body of the request. + /// Ignores cache reset requirements. + /// Disables cache reprocessing change detection. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateOrUpdateAsync(string skillsetName, RequestContent content, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(skillsetName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new skillset in a search service or updates the skillset if it + /// already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the skillset. + /// The content to send as the body of the request. + /// Ignores cache reset requirements. + /// Disables cache reprocessing change detection. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response CreateOrUpdate(string skillsetName, RequestContent content, bool? skipIndexerResetRequirementForCache = null, bool? 
disableCacheReprocessingChangeDetection = null, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(skillsetName, content, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the skillset. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteAsync(string skillsetName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(skillsetName, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the skillset. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(string skillsetName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(skillsetName, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves a skillset in a search service. + /// The name of the skillset. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual async Task> GetSkillsetAsync(string skillsetName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetSkillsetAsync(skillsetName, context).ConfigureAwait(false); + return Response.FromValue(SearchIndexerSkillset.FromResponse(response), response); + } + + /// Retrieves a skillset in a search service. + /// The name of the skillset. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetSkillset(string skillsetName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetSkillset(skillsetName, context); + return Response.FromValue(SearchIndexerSkillset.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieves a skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the skillset. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetSkillsetAsync(string skillsetName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.GetSkillset"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSkillsetRequest(skillsetName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieves a skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the skillset. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetSkillset(string skillsetName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.GetSkillset"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSkillsetRequest(skillsetName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all skillsets in a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. 
The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual async Task> GetSkillsetsAsync(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetSkillsetsAsync(select, context).ConfigureAwait(false); + return Response.FromValue(ListSkillsetsResult.FromResponse(response), response); + } + + /// List all skillsets in a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual Response GetSkillsets(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetSkillsets(select, context); + return Response.FromValue(ListSkillsetsResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] List all skillsets in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetSkillsetsAsync(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("Skillsets.GetSkillsets"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSkillsetsRequest(select, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all skillsets in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetSkillsets(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("Skillsets.GetSkillsets"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSkillsetsRequest(select, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new skillset in a search service. + /// The skillset containing one or more skills to create in a search service. + /// The cancellation token to use. + /// is null. 
+ /// + public virtual async Task> CreateAsync(SearchIndexerSkillset skillset, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(skillset, nameof(skillset)); + + using RequestContent content = skillset.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(content, context).ConfigureAwait(false); + return Response.FromValue(SearchIndexerSkillset.FromResponse(response), response); + } + + /// Creates a new skillset in a search service. + /// The skillset containing one or more skills to create in a search service. + /// The cancellation token to use. + /// is null. + /// + public virtual Response Create(SearchIndexerSkillset skillset, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(skillset, nameof(skillset)); + + using RequestContent content = skillset.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(content, context); + return Response.FromValue(SearchIndexerSkillset.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Reset an existing skillset in a search service. + /// The name of the skillset. + /// + /// The names of the skills to reset. If not specified, all skills in the skillset + /// will be reset. + /// + /// The cancellation token to use. + /// or is null. 
+ /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task ResetSkillsAsync(string skillsetName, ResetSkillsOptions resetSkillsOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + Argument.AssertNotNull(resetSkillsOptions, nameof(resetSkillsOptions)); + + using RequestContent content = resetSkillsOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await ResetSkillsAsync(skillsetName, content, context).ConfigureAwait(false); + return response; + } + + /// Reset an existing skillset in a search service. + /// The name of the skillset. + /// + /// The names of the skills to reset. If not specified, all skills in the skillset + /// will be reset. + /// + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response ResetSkills(string skillsetName, ResetSkillsOptions resetSkillsOptions, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + Argument.AssertNotNull(resetSkillsOptions, nameof(resetSkillsOptions)); + + using RequestContent content = resetSkillsOptions.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = ResetSkills(skillsetName, content, context); + return response; + } + + /// + /// [Protocol Method] Reset an existing skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the skillset. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task ResetSkillsAsync(string skillsetName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.ResetSkills"); + scope.Start(); + try + { + using HttpMessage message = CreateResetSkillsRequest(skillsetName, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Reset an existing skillset in a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the skillset. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response ResetSkills(string skillsetName, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(skillsetName, nameof(skillsetName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("Skillsets.ResetSkills"); + scope.Start(); + try + { + using HttpMessage message = CreateResetSkillsRequest(skillsetName, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateOrUpdateRequest(string skillsetName, RequestContent content, bool? skipIndexerResetRequirementForCache, bool? disableCacheReprocessingChangeDetection, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200201); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/skillsets('", false); + uri.AppendPath(skillsetName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (skipIndexerResetRequirementForCache != null) + { + uri.AppendQuery("ignoreResetRequirements", skipIndexerResetRequirementForCache.Value, true); + } + if (disableCacheReprocessingChangeDetection != null) + { + uri.AppendQuery("disableCacheReprocessingChangeDetection", disableCacheReprocessingChangeDetection.Value, true); + } + request.Uri = uri; + request.Headers.Add("Prefer", "return=representation"); + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(string skillsetName, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204404); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/skillsets('", false); + uri.AppendPath(skillsetName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + return message; + } + + internal HttpMessage CreateGetSkillsetRequest(string skillsetName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/skillsets('", false); + uri.AppendPath(skillsetName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetSkillsetsRequest(string select, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/skillsets", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (select != null) + { + uri.AppendQuery("$select", select, true); + } + 
request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier201); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/skillsets", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateResetSkillsRequest(string skillsetName, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/skillsets('", false); + uri.AppendPath(skillsetName, true); + uri.AppendPath("')/search.resetskills", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200201; + private static ResponseClassifier ResponseClassifier200201 => _responseClassifier200201 ??= new StatusCodeClassifier(stackalloc ushort[] { 200, 201 }); + private static ResponseClassifier _responseClassifier204404; + private static ResponseClassifier ResponseClassifier204404 => _responseClassifier204404 ??= new StatusCodeClassifier(stackalloc ushort[] { 204, 404 }); + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier201; + private static ResponseClassifier ResponseClassifier201 => _responseClassifier201 ??= new StatusCodeClassifier(stackalloc ushort[] { 201 }); + private static ResponseClassifier _responseClassifier204; + private static ResponseClassifier ResponseClassifier204 => _responseClassifier204 ??= new StatusCodeClassifier(stackalloc ushort[] { 204 }); + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SkillsetsRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/SkillsetsRestClient.cs deleted file mode 100644 index 29ff7500fa13..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/SkillsetsRestClient.cs +++ /dev/null @@ -1,506 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
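For orientation on the new surface above (the Skillsets subclient that replaces the SkillsetsRestClient being deleted below): a minimal, hedged sketch of calling both the convenience and the protocol CreateOrUpdate overloads. The `skillsets` instance, the already-built `skillset` model, and the example payload are assumptions for illustration only; they are not part of this patch.

using System;
using System.Threading.Tasks;
using Azure;
using Azure.Core;
using Azure.Search.Documents;

// Sketch only: `skillsets` is assumed to be an already-constructed Skillsets subclient,
// and `skillset` an already-populated SearchIndexerSkillset model.
static async Task UpsertSkillsetSketchAsync(Skillsets skillsets, SearchIndexerSkillset skillset)
{
    // Convenience overload: strongly typed model in, strongly typed model out.
    Response<SearchIndexerSkillset> updated = await skillsets.CreateOrUpdateAsync(
        "demo-skillset",
        skillset,
        matchConditions: new MatchConditions { IfNoneMatch = ETag.All }); // create only if it does not already exist

    // Protocol overload: raw JSON body and raw Response, opting out of throwing on errors.
    // The payload shape is abbreviated for the sketch.
    Response raw = await skillsets.CreateOrUpdateAsync(
        "demo-skillset",
        RequestContent.Create(BinaryData.FromObjectAsJson(new { name = "demo-skillset", skills = new object[0] })),
        context: new RequestContext { ErrorOptions = ErrorOptions.NoThrow });

    if (raw.IsError)
    {
        Console.WriteLine($"Create-or-update failed with status {raw.Status}.");
    }
}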
- -// - -#nullable disable - -using System; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Indexes.Models; -using Azure.Search.Documents.Models; - -namespace Azure.Search.Documents -{ - internal partial class SkillsetsRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of SkillsetsRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , or is null. - public SkillsetsRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateCreateOrUpdateRequest(string skillsetName, SearchIndexerSkillset skillset, string ifMatch, string ifNoneMatch, bool? skipIndexerResetRequirementForCache, bool? disableCacheReprocessingChangeDetection) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Put; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/skillsets('", false); - uri.AppendPath(skillsetName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - if (skipIndexerResetRequirementForCache != null) - { - uri.AppendQuery("ignoreResetRequirements", skipIndexerResetRequirementForCache.Value, true); - } - if (disableCacheReprocessingChangeDetection != null) - { - uri.AppendQuery("disableCacheReprocessingChangeDetection", disableCacheReprocessingChangeDetection.Value, true); - } - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Prefer", "return=representation"); - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(skillset); - request.Content = content; - return message; - } - - /// Creates a new skillset in a search service or updates the skillset if it already exists. - /// The name of the skillset to create or update. - /// The skillset containing one or more skills to create or update in a search service. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// Ignores cache reset requirements. 
- /// Disables cache reprocessing change detection. - /// The cancellation token to use. - /// or is null. - public async Task> CreateOrUpdateAsync(string skillsetName, SearchIndexerSkillset skillset, string ifMatch = null, string ifNoneMatch = null, bool? skipIndexerResetRequirementForCache = null, bool? disableCacheReprocessingChangeDetection = null, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - if (skillset == null) - { - throw new ArgumentNullException(nameof(skillset)); - } - - using var message = CreateCreateOrUpdateRequest(skillsetName, skillset, ifMatch, ifNoneMatch, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndexerSkillset value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexerSkillset.DeserializeSearchIndexerSkillset(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new skillset in a search service or updates the skillset if it already exists. - /// The name of the skillset to create or update. - /// The skillset containing one or more skills to create or update in a search service. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// Ignores cache reset requirements. - /// Disables cache reprocessing change detection. - /// The cancellation token to use. - /// or is null. - public Response CreateOrUpdate(string skillsetName, SearchIndexerSkillset skillset, string ifMatch = null, string ifNoneMatch = null, bool? skipIndexerResetRequirementForCache = null, bool? 
disableCacheReprocessingChangeDetection = null, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - if (skillset == null) - { - throw new ArgumentNullException(nameof(skillset)); - } - - using var message = CreateCreateOrUpdateRequest(skillsetName, skillset, ifMatch, ifNoneMatch, skipIndexerResetRequirementForCache, disableCacheReprocessingChangeDetection); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - case 201: - { - SearchIndexerSkillset value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexerSkillset.DeserializeSearchIndexerSkillset(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateDeleteRequest(string skillsetName, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Delete; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/skillsets('", false); - uri.AppendPath(skillsetName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Deletes a skillset in a search service. - /// The name of the skillset to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public async Task DeleteAsync(string skillsetName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - - using var message = CreateDeleteRequest(skillsetName, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Deletes a skillset in a search service. - /// The name of the skillset to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. 
- public Response Delete(string skillsetName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - - using var message = CreateDeleteRequest(skillsetName, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetRequest(string skillsetName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/skillsets('", false); - uri.AppendPath(skillsetName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Retrieves a skillset in a search service. - /// The name of the skillset to retrieve. - /// The cancellation token to use. - /// is null. - public async Task> GetAsync(string skillsetName, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - - using var message = CreateGetRequest(skillsetName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SearchIndexerSkillset value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexerSkillset.DeserializeSearchIndexerSkillset(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Retrieves a skillset in a search service. - /// The name of the skillset to retrieve. - /// The cancellation token to use. - /// is null. - public Response Get(string skillsetName, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - - using var message = CreateGetRequest(skillsetName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SearchIndexerSkillset value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexerSkillset.DeserializeSearchIndexerSkillset(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateListRequest(string select) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/skillsets", false); - if (select != null) - { - uri.AppendQuery("$select", select, true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// List all skillsets in a search service. 
- /// Selects which top-level properties of the skillsets to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public async Task> ListAsync(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - ListSkillsetsResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = ListSkillsetsResult.DeserializeListSkillsetsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// List all skillsets in a search service. - /// Selects which top-level properties of the skillsets to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public Response List(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - ListSkillsetsResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = ListSkillsetsResult.DeserializeListSkillsetsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateCreateRequest(SearchIndexerSkillset skillset) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/skillsets", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(skillset); - request.Content = content; - return message; - } - - /// Creates a new skillset in a search service. - /// The skillset containing one or more skills to create in a search service. - /// The cancellation token to use. - /// is null. 
- public async Task> CreateAsync(SearchIndexerSkillset skillset, CancellationToken cancellationToken = default) - { - if (skillset == null) - { - throw new ArgumentNullException(nameof(skillset)); - } - - using var message = CreateCreateRequest(skillset); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 201: - { - SearchIndexerSkillset value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SearchIndexerSkillset.DeserializeSearchIndexerSkillset(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new skillset in a search service. - /// The skillset containing one or more skills to create in a search service. - /// The cancellation token to use. - /// is null. - public Response Create(SearchIndexerSkillset skillset, CancellationToken cancellationToken = default) - { - if (skillset == null) - { - throw new ArgumentNullException(nameof(skillset)); - } - - using var message = CreateCreateRequest(skillset); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 201: - { - SearchIndexerSkillset value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SearchIndexerSkillset.DeserializeSearchIndexerSkillset(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateResetSkillsRequest(string skillsetName, ResetSkillsOptions resetSkillsOptions) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/skillsets('", false); - uri.AppendPath(skillsetName, true); - uri.AppendPath("')/search.resetskills", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(resetSkillsOptions); - request.Content = content; - return message; - } - - /// Reset an existing skillset in a search service. - /// The name of the skillset to reset. - /// The names of skills to reset. - /// The cancellation token to use. - /// or is null. - public async Task ResetSkillsAsync(string skillsetName, ResetSkillsOptions resetSkillsOptions, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - if (resetSkillsOptions == null) - { - throw new ArgumentNullException(nameof(resetSkillsOptions)); - } - - using var message = CreateResetSkillsRequest(skillsetName, resetSkillsOptions); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Reset an existing skillset in a search service. - /// The name of the skillset to reset. - /// The names of skills to reset. 
- /// The cancellation token to use. - /// or is null. - public Response ResetSkills(string skillsetName, ResetSkillsOptions resetSkillsOptions, CancellationToken cancellationToken = default) - { - if (skillsetName == null) - { - throw new ArgumentNullException(nameof(skillsetName)); - } - if (resetSkillsOptions == null) - { - throw new ArgumentNullException(nameof(resetSkillsOptions)); - } - - using var message = CreateResetSkillsRequest(skillsetName, resetSkillsOptions); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilter.Serialization.cs new file mode 100644 index 000000000000..8b71cbb6b19b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilter.Serialization.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SnowballTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SnowballTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("language"u8); + writer.WriteStringValue(Language.ToString()); + } + + SnowballTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SnowballTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSnowballTokenFilter(document.RootElement, options); + } + + internal static SnowballTokenFilter DeserializeSnowballTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SnowballTokenFilterLanguage language = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("language"u8)) + { + language = new SnowballTokenFilterLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SnowballTokenFilter(odataType, name, serializedAdditionalRawData, language); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SnowballTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + SnowballTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSnowballTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SnowballTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SnowballTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSnowballTokenFilter(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilter.cs new file mode 100644 index 000000000000..5af0d8da5d1c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilter.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A filter that stems words using a Snowball-generated stemmer. This token filter + /// is implemented using Apache Lucene. + /// + public partial class SnowballTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// The language to use. + /// is null. + public SnowballTokenFilter(string name, SnowballTokenFilterLanguage language) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.SnowballTokenFilter"; + Language = language; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The language to use. + internal SnowballTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, SnowballTokenFilterLanguage language) : base(odataType, name, serializedAdditionalRawData) + { + Language = language; + } + + /// Initializes a new instance of for deserialization. + internal SnowballTokenFilter() + { + } + + /// The language to use. + public SnowballTokenFilterLanguage Language { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilterLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilterLanguage.cs new file mode 100644 index 000000000000..6dc115092748 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SnowballTokenFilterLanguage.cs @@ -0,0 +1,123 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// The language to use for a Snowball token filter. + public readonly partial struct SnowballTokenFilterLanguage : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public SnowballTokenFilterLanguage(string value) + { + _value = value ?? 
throw new ArgumentNullException(nameof(value)); + } + + private const string ArmenianValue = "armenian"; + private const string BasqueValue = "basque"; + private const string CatalanValue = "catalan"; + private const string DanishValue = "danish"; + private const string DutchValue = "dutch"; + private const string EnglishValue = "english"; + private const string FinnishValue = "finnish"; + private const string FrenchValue = "french"; + private const string GermanValue = "german"; + private const string German2Value = "german2"; + private const string HungarianValue = "hungarian"; + private const string ItalianValue = "italian"; + private const string KpValue = "kp"; + private const string LovinsValue = "lovins"; + private const string NorwegianValue = "norwegian"; + private const string PorterValue = "porter"; + private const string PortugueseValue = "portuguese"; + private const string RomanianValue = "romanian"; + private const string RussianValue = "russian"; + private const string SpanishValue = "spanish"; + private const string SwedishValue = "swedish"; + private const string TurkishValue = "turkish"; + + /// Selects the Lucene Snowball stemming tokenizer for Armenian. + public static SnowballTokenFilterLanguage Armenian { get; } = new SnowballTokenFilterLanguage(ArmenianValue); + /// Selects the Lucene Snowball stemming tokenizer for Basque. + public static SnowballTokenFilterLanguage Basque { get; } = new SnowballTokenFilterLanguage(BasqueValue); + /// Selects the Lucene Snowball stemming tokenizer for Catalan. + public static SnowballTokenFilterLanguage Catalan { get; } = new SnowballTokenFilterLanguage(CatalanValue); + /// Selects the Lucene Snowball stemming tokenizer for Danish. + public static SnowballTokenFilterLanguage Danish { get; } = new SnowballTokenFilterLanguage(DanishValue); + /// Selects the Lucene Snowball stemming tokenizer for Dutch. + public static SnowballTokenFilterLanguage Dutch { get; } = new SnowballTokenFilterLanguage(DutchValue); + /// Selects the Lucene Snowball stemming tokenizer for English. + public static SnowballTokenFilterLanguage English { get; } = new SnowballTokenFilterLanguage(EnglishValue); + /// Selects the Lucene Snowball stemming tokenizer for Finnish. + public static SnowballTokenFilterLanguage Finnish { get; } = new SnowballTokenFilterLanguage(FinnishValue); + /// Selects the Lucene Snowball stemming tokenizer for French. + public static SnowballTokenFilterLanguage French { get; } = new SnowballTokenFilterLanguage(FrenchValue); + /// Selects the Lucene Snowball stemming tokenizer for German. + public static SnowballTokenFilterLanguage German { get; } = new SnowballTokenFilterLanguage(GermanValue); + /// + /// Selects the Lucene Snowball stemming tokenizer that uses the German variant + /// algorithm. + /// + public static SnowballTokenFilterLanguage German2 { get; } = new SnowballTokenFilterLanguage(German2Value); + /// Selects the Lucene Snowball stemming tokenizer for Hungarian. + public static SnowballTokenFilterLanguage Hungarian { get; } = new SnowballTokenFilterLanguage(HungarianValue); + /// Selects the Lucene Snowball stemming tokenizer for Italian. + public static SnowballTokenFilterLanguage Italian { get; } = new SnowballTokenFilterLanguage(ItalianValue); + /// + /// Selects the Lucene Snowball stemming tokenizer for Dutch that uses the + /// Kraaij-Pohlmann stemming algorithm. 
+ /// + public static SnowballTokenFilterLanguage Kp { get; } = new SnowballTokenFilterLanguage(KpValue); + /// + /// Selects the Lucene Snowball stemming tokenizer for English that uses the Lovins + /// stemming algorithm. + /// + public static SnowballTokenFilterLanguage Lovins { get; } = new SnowballTokenFilterLanguage(LovinsValue); + /// Selects the Lucene Snowball stemming tokenizer for Norwegian. + public static SnowballTokenFilterLanguage Norwegian { get; } = new SnowballTokenFilterLanguage(NorwegianValue); + /// + /// Selects the Lucene Snowball stemming tokenizer for English that uses the Porter + /// stemming algorithm. + /// + public static SnowballTokenFilterLanguage Porter { get; } = new SnowballTokenFilterLanguage(PorterValue); + /// Selects the Lucene Snowball stemming tokenizer for Portuguese. + public static SnowballTokenFilterLanguage Portuguese { get; } = new SnowballTokenFilterLanguage(PortugueseValue); + /// Selects the Lucene Snowball stemming tokenizer for Romanian. + public static SnowballTokenFilterLanguage Romanian { get; } = new SnowballTokenFilterLanguage(RomanianValue); + /// Selects the Lucene Snowball stemming tokenizer for Russian. + public static SnowballTokenFilterLanguage Russian { get; } = new SnowballTokenFilterLanguage(RussianValue); + /// Selects the Lucene Snowball stemming tokenizer for Spanish. + public static SnowballTokenFilterLanguage Spanish { get; } = new SnowballTokenFilterLanguage(SpanishValue); + /// Selects the Lucene Snowball stemming tokenizer for Swedish. + public static SnowballTokenFilterLanguage Swedish { get; } = new SnowballTokenFilterLanguage(SwedishValue); + /// Selects the Lucene Snowball stemming tokenizer for Turkish. + public static SnowballTokenFilterLanguage Turkish { get; } = new SnowballTokenFilterLanguage(TurkishValue); + /// Determines if two values are the same. + public static bool operator ==(SnowballTokenFilterLanguage left, SnowballTokenFilterLanguage right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(SnowballTokenFilterLanguage left, SnowballTokenFilterLanguage right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator SnowballTokenFilterLanguage(string value) => new SnowballTokenFilterLanguage(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is SnowballTokenFilterLanguage other && Equals(other); + /// + public bool Equals(SnowballTokenFilterLanguage other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SoftDeleteColumnDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SoftDeleteColumnDeletionDetectionPolicy.Serialization.cs new file mode 100644 index 000000000000..c135f8bd0733 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SoftDeleteColumnDeletionDetectionPolicy.Serialization.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
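A hedged sketch of the Snowball types introduced above: the filter pairs a name with a stemming language, and the extensible enum compares values case-insensitively and converts implicitly from string, so service values without an SDK constant still round-trip. Wiring the filter into an index definition is assumed to follow the usual custom-analyzer pattern and is not shown here.

using Azure.Search.Documents;

// Sketch only: stem English tokens with the Porter Snowball stemmer.
var porterStemmer = new SnowballTokenFilter("my_snowball_porter", SnowballTokenFilterLanguage.Porter);

// The extensible enum accepts values the SDK has no named constant for,
// and equality uses an invariant, case-insensitive comparison.
SnowballTokenFilterLanguage fromService = new SnowballTokenFilterLanguage("English");
bool matchesKnownValue = fromService == SnowballTokenFilterLanguage.English; // true

// Implicit conversion from string is also available for ad hoc values.
SnowballTokenFilterLanguage viaImplicit = "german2";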
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SoftDeleteColumnDeletionDetectionPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SoftDeleteColumnDeletionDetectionPolicy)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(SoftDeleteColumnName)) + { + writer.WritePropertyName("softDeleteColumnName"u8); + writer.WriteStringValue(SoftDeleteColumnName); + } + if (Optional.IsDefined(SoftDeleteMarkerValue)) + { + writer.WritePropertyName("softDeleteMarkerValue"u8); + writer.WriteStringValue(SoftDeleteMarkerValue); + } + } + + SoftDeleteColumnDeletionDetectionPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SoftDeleteColumnDeletionDetectionPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSoftDeleteColumnDeletionDetectionPolicy(document.RootElement, options); + } + + internal static SoftDeleteColumnDeletionDetectionPolicy DeserializeSoftDeleteColumnDeletionDetectionPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string softDeleteColumnName = default; + string softDeleteMarkerValue = default; + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("softDeleteColumnName"u8)) + { + softDeleteColumnName = property.Value.GetString(); + continue; + } + if (property.NameEquals("softDeleteMarkerValue"u8)) + { + softDeleteMarkerValue = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SoftDeleteColumnDeletionDetectionPolicy(odataType, serializedAdditionalRawData, softDeleteColumnName, softDeleteMarkerValue); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SoftDeleteColumnDeletionDetectionPolicy)} does not support writing '{options.Format}' format."); + } + } + + SoftDeleteColumnDeletionDetectionPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSoftDeleteColumnDeletionDetectionPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SoftDeleteColumnDeletionDetectionPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SoftDeleteColumnDeletionDetectionPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSoftDeleteColumnDeletionDetectionPolicy(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SoftDeleteColumnDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/SoftDeleteColumnDeletionDetectionPolicy.cs new file mode 100644 index 000000000000..85feb8cbaa48 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SoftDeleteColumnDeletionDetectionPolicy.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a data deletion detection policy that implements a soft-deletion + /// strategy. It determines whether an item should be deleted based on the value of + /// a designated 'soft delete' column. + /// + public partial class SoftDeleteColumnDeletionDetectionPolicy : DataDeletionDetectionPolicy + { + /// Initializes a new instance of . + public SoftDeleteColumnDeletionDetectionPolicy() + { + OdataType = "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + /// The name of the column to use for soft-deletion detection. + /// The marker value that identifies an item as deleted. + internal SoftDeleteColumnDeletionDetectionPolicy(string odataType, IDictionary serializedAdditionalRawData, string softDeleteColumnName, string softDeleteMarkerValue) : base(odataType, serializedAdditionalRawData) + { + SoftDeleteColumnName = softDeleteColumnName; + SoftDeleteMarkerValue = softDeleteMarkerValue; + } + + /// The name of the column to use for soft-deletion detection. + public string SoftDeleteColumnName { get; set; } + /// The marker value that identifies an item as deleted. 
+ public string SoftDeleteMarkerValue { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SplitSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkill.Serialization.cs new file mode 100644 index 000000000000..76ad9b8c0a6d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkill.Serialization.cs @@ -0,0 +1,285 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SplitSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SplitSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(DefaultLanguageCode)) + { + writer.WritePropertyName("defaultLanguageCode"u8); + writer.WriteStringValue(DefaultLanguageCode.Value.ToString()); + } + if (Optional.IsDefined(TextSplitMode)) + { + writer.WritePropertyName("textSplitMode"u8); + writer.WriteStringValue(TextSplitMode.Value.ToString()); + } + if (Optional.IsDefined(MaximumPageLength)) + { + writer.WritePropertyName("maximumPageLength"u8); + writer.WriteNumberValue(MaximumPageLength.Value); + } + if (Optional.IsDefined(PageOverlapLength)) + { + writer.WritePropertyName("pageOverlapLength"u8); + writer.WriteNumberValue(PageOverlapLength.Value); + } + if (Optional.IsDefined(MaximumPagesToTake)) + { + writer.WritePropertyName("maximumPagesToTake"u8); + writer.WriteNumberValue(MaximumPagesToTake.Value); + } + if (Optional.IsDefined(Unit)) + { + writer.WritePropertyName("unit"u8); + writer.WriteStringValue(Unit.Value.ToString()); + } + if (Optional.IsDefined(AzureOpenAITokenizerParameters)) + { + writer.WritePropertyName("azureOpenAITokenizerParameters"u8); + writer.WriteObjectValue(AzureOpenAITokenizerParameters, options); + } + } + + SplitSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SplitSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSplitSkill(document.RootElement, options); + } + + internal static SplitSkill DeserializeSplitSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SplitSkillLanguage? defaultLanguageCode = default; + TextSplitMode? textSplitMode = default; + int? 
maximumPageLength = default; + int? pageOverlapLength = default; + int? maximumPagesToTake = default; + SplitSkillUnit? unit = default; + AzureOpenAITokenizerParameters azureOpenAITokenizerParameters = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultLanguageCode = new SplitSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("textSplitMode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + textSplitMode = new TextSplitMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("maximumPageLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maximumPageLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("pageOverlapLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + pageOverlapLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("maximumPagesToTake"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maximumPagesToTake = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("unit"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + unit = new SplitSkillUnit(property.Value.GetString()); + continue; + } + if (property.NameEquals("azureOpenAITokenizerParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + azureOpenAITokenizerParameters = AzureOpenAITokenizerParameters.DeserializeAzureOpenAITokenizerParameters(property.Value, options); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SplitSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultLanguageCode, + textSplitMode, + maximumPageLength, + pageOverlapLength, + maximumPagesToTake, + unit, + azureOpenAITokenizerParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SplitSkill)} does not support writing '{options.Format}' format."); + } + } + + SplitSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSplitSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SplitSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SplitSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSplitSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SplitSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkill.cs new file mode 100644 index 000000000000..024c28faf165 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkill.cs @@ -0,0 +1,131 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A skill to split a string into chunks of text. + public partial class SplitSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// or is null. + public SplitSkill(IEnumerable inputs, IEnumerable outputs) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.SplitSkill"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. 
+ /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// A value indicating which language code to use. Default is `en`. + /// A value indicating which split mode to perform. + /// The desired maximum page length. Default is 10000. + /// + /// Only applicable when textSplitMode is set to 'pages'. If specified, n+1th chunk + /// will start with this number of characters/tokens from the end of the nth chunk. + /// + /// + /// Only applicable when textSplitMode is set to 'pages'. If specified, the + /// SplitSkill will discontinue splitting after processing the first 'maximumPagesToTake' + /// pages, in order to improve performance when only a few + /// initial pages are needed from each document. + /// + /// + /// Only applies if textSplitMode is set to pages. There are two possible values. + /// The choice of the values will decide the length (maximumPageLength and + /// pageOverlapLength) measurement. The default is 'characters', which means the + /// length will be measured by character. + /// + /// + /// Only applies if the unit is set to azureOpenAITokens. If specified, the + /// splitSkill will use these parameters when performing the tokenization. The + /// parameters are a valid 'encoderModelName' and an optional 'allowedSpecialTokens' property. + /// + internal SplitSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, SplitSkillLanguage? defaultLanguageCode, TextSplitMode? textSplitMode, int? maximumPageLength, int? pageOverlapLength, int? maximumPagesToTake, SplitSkillUnit? unit, AzureOpenAITokenizerParameters azureOpenAITokenizerParameters) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultLanguageCode = defaultLanguageCode; + TextSplitMode = textSplitMode; + MaximumPageLength = maximumPageLength; + PageOverlapLength = pageOverlapLength; + MaximumPagesToTake = maximumPagesToTake; + Unit = unit; + AzureOpenAITokenizerParameters = azureOpenAITokenizerParameters; + } + + /// Initializes a new instance of for deserialization. + internal SplitSkill() + { + } + + /// A value indicating which language code to use. Default is `en`. + public SplitSkillLanguage? DefaultLanguageCode { get; set; } + /// A value indicating which split mode to perform. + public TextSplitMode? TextSplitMode { get; set; } + /// The desired maximum page length. Default is 10000. + public int? MaximumPageLength { get; set; } + /// + /// Only applicable when textSplitMode is set to 'pages'. If specified, n+1th chunk + /// will start with this number of characters/tokens from the end of the nth chunk. + /// + public int? PageOverlapLength { get; set; } + /// + /// Only applicable when textSplitMode is set to 'pages'. If specified, the + /// SplitSkill will discontinue splitting after processing the first 'maximumPagesToTake' + /// pages, in order to improve performance when only a few + /// initial pages are needed from each document. + /// + public int? MaximumPagesToTake { get; set; } + /// + /// Only applies if textSplitMode is set to pages. There are two possible values. + /// The choice of the values will decide the length (maximumPageLength and + /// pageOverlapLength) measurement. The default is 'characters', which means the + /// length will be measured by character. + /// + public SplitSkillUnit? 
Unit { get; set; } + /// + /// Only applies if the unit is set to azureOpenAITokens. If specified, the + /// splitSkill will use these parameters when performing the tokenization. The + /// parameters are a valid 'encoderModelName' and an optional 'allowedSpecialTokens' property. + /// + public AzureOpenAITokenizerParameters AzureOpenAITokenizerParameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SplitSkillEncoderModelName.cs b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkillEncoderModelName.cs new file mode 100644 index 000000000000..f738133adadf --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkillEncoderModelName.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// A value indicating which tokenizer to use. + public readonly partial struct SplitSkillEncoderModelName : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public SplitSkillEncoderModelName(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string R50kBaseValue = "r50k_base"; + private const string P50kBaseValue = "p50k_base"; + private const string P50kEditValue = "p50k_edit"; + private const string CL100kBaseValue = "cl100k_base"; + + /// + /// Refers to a base model trained with a 50,000 token vocabulary, often used in + /// general natural language processing tasks. + /// + public static SplitSkillEncoderModelName R50kBase { get; } = new SplitSkillEncoderModelName(R50kBaseValue); + /// A base model with a 50,000 token vocabulary, optimized for prompt-based tasks. + public static SplitSkillEncoderModelName P50kBase { get; } = new SplitSkillEncoderModelName(P50kBaseValue); + /// + /// Similar to p50k_base but fine-tuned for editing or rephrasing tasks with a + /// 50,000 token vocabulary. + /// + public static SplitSkillEncoderModelName P50kEdit { get; } = new SplitSkillEncoderModelName(P50kEditValue); + /// A base model with a 100,000 token vocabulary. + public static SplitSkillEncoderModelName CL100kBase { get; } = new SplitSkillEncoderModelName(CL100kBaseValue); + /// Determines if two values are the same. + public static bool operator ==(SplitSkillEncoderModelName left, SplitSkillEncoderModelName right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(SplitSkillEncoderModelName left, SplitSkillEncoderModelName right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator SplitSkillEncoderModelName(string value) => new SplitSkillEncoderModelName(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is SplitSkillEncoderModelName other && Equals(other); + /// + public bool Equals(SplitSkillEncoderModelName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkillLanguage.cs similarity index 99% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SplitSkillLanguage.cs index aa0b22959289..206993739e59 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkillLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The language codes supported for input text by SplitSkill. public readonly partial struct SplitSkillLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillUnit.cs b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkillUnit.cs similarity index 92% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillUnit.cs rename to sdk/search/Azure.Search.Documents/src/Generated/SplitSkillUnit.cs index db315551e584..2ca8f71f06d3 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/SplitSkillUnit.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/SplitSkillUnit.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// A value indicating which unit to use. public readonly partial struct SplitSkillUnit : IEquatable @@ -27,7 +27,10 @@ public SplitSkillUnit(string value) /// The length will be measured by character. public static SplitSkillUnit Characters { get; } = new SplitSkillUnit(CharactersValue); - /// The length will be measured by an AzureOpenAI tokenizer from the tiktoken library. + /// + /// The length will be measured by an AzureOpenAI tokenizer from the tiktoken + /// library. + /// public static SplitSkillUnit AzureOpenAITokens { get; } = new SplitSkillUnit(AzureOpenAITokensValue); /// Determines if two values are the same. public static bool operator ==(SplitSkillUnit left, SplitSkillUnit right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SqlIntegratedChangeTrackingPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SqlIntegratedChangeTrackingPolicy.Serialization.cs new file mode 100644 index 000000000000..439c8df317b2 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SqlIntegratedChangeTrackingPolicy.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SqlIntegratedChangeTrackingPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SqlIntegratedChangeTrackingPolicy)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + SqlIntegratedChangeTrackingPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SqlIntegratedChangeTrackingPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSqlIntegratedChangeTrackingPolicy(document.RootElement, options); + } + + internal static SqlIntegratedChangeTrackingPolicy DeserializeSqlIntegratedChangeTrackingPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SqlIntegratedChangeTrackingPolicy(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SqlIntegratedChangeTrackingPolicy)} does not support writing '{options.Format}' format."); + } + } + + SqlIntegratedChangeTrackingPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSqlIntegratedChangeTrackingPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SqlIntegratedChangeTrackingPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SqlIntegratedChangeTrackingPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSqlIntegratedChangeTrackingPolicy(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SqlIntegratedChangeTrackingPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/SqlIntegratedChangeTrackingPolicy.cs new file mode 100644 index 000000000000..2d19b6364f6d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SqlIntegratedChangeTrackingPolicy.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a data change detection policy that captures changes using the + /// Integrated Change Tracking feature of Azure SQL Database. + /// + public partial class SqlIntegratedChangeTrackingPolicy : DataChangeDetectionPolicy + { + /// Initializes a new instance of . + public SqlIntegratedChangeTrackingPolicy() + { + OdataType = "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal SqlIntegratedChangeTrackingPolicy(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StemmerOverrideTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/StemmerOverrideTokenFilter.Serialization.cs new file mode 100644 index 000000000000..d6189487ad1b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StemmerOverrideTokenFilter.Serialization.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class StemmerOverrideTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StemmerOverrideTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("rules"u8); + writer.WriteStartArray(); + foreach (var item in Rules) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + + StemmerOverrideTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StemmerOverrideTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeStemmerOverrideTokenFilter(document.RootElement, options); + } + + internal static StemmerOverrideTokenFilter DeserializeStemmerOverrideTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList rules = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("rules"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + rules = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new StemmerOverrideTokenFilter(odataType, name, serializedAdditionalRawData, rules); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(StemmerOverrideTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + StemmerOverrideTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStemmerOverrideTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(StemmerOverrideTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new StemmerOverrideTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStemmerOverrideTokenFilter(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StemmerOverrideTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/StemmerOverrideTokenFilter.cs new file mode 100644 index 000000000000..061269f1ba1f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StemmerOverrideTokenFilter.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Provides the ability to override other stemming filters with custom + /// dictionary-based stemming. Any dictionary-stemmed terms will be marked as + /// keywords so that they will not be stemmed with stemmers down the chain. Must be + /// placed before any stemming filters. This token filter is implemented using + /// Apache Lucene. + /// + public partial class StemmerOverrideTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// + /// A list of stemming rules in the following format: "word => stem", for example: + /// "ran => run". + /// + /// or is null. + public StemmerOverrideTokenFilter(string name, IEnumerable rules) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(rules, nameof(rules)); + + OdataType = "#Microsoft.Azure.Search.StemmerOverrideTokenFilter"; + Rules = rules.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// A list of stemming rules in the following format: "word => stem", for example: + /// "ran => run". + /// + internal StemmerOverrideTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList rules) : base(odataType, name, serializedAdditionalRawData) + { + Rules = rules; + } + + /// Initializes a new instance of for deserialization. + internal StemmerOverrideTokenFilter() + { + } + + /// + /// A list of stemming rules in the following format: "word => stem", for example: + /// "ran => run". + /// + public IList Rules { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilter.Serialization.cs new file mode 100644 index 000000000000..c0da5613ed73 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilter.Serialization.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
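// Illustrative usage sketch (editorial example, not part of the generated diff): it shows the
// ordering constraint documented above, where StemmerOverrideTokenFilter must be placed before
// any stemming filters in the analyzer chain. Assumes the public Azure.Search.Documents.Indexes
// surface; the index, filter, and analyzer names below are hypothetical placeholders, and the
// override rules use the documented "word => stem" format.
using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexClient = new SearchIndexClient(
    new Uri("https://<service>.search.windows.net"),
    new AzureKeyCredential("<admin-key>"));

var index = new SearchIndex("products");

// Dictionary-stemmed terms are marked as keywords so the stemmer further down the chain leaves them alone.
index.TokenFilters.Add(new StemmerOverrideTokenFilter("my-overrides", new[] { "ran => run", "mice => mouse" }));
index.TokenFilters.Add(new StemmerTokenFilter("my-stemmer", StemmerTokenFilterLanguage.English));

index.Analyzers.Add(new CustomAnalyzer("en-stemming-analyzer", LexicalTokenizerName.Whitespace)
{
    // Override filter first, stemmer second, per the ordering requirement above.
    TokenFilters = { TokenFilterName.Lowercase, "my-overrides", "my-stemmer" }
});

indexClient.CreateOrUpdateIndex(index);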
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class StemmerTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StemmerTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("language"u8); + writer.WriteStringValue(Language.ToString()); + } + + StemmerTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StemmerTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeStemmerTokenFilter(document.RootElement, options); + } + + internal static StemmerTokenFilter DeserializeStemmerTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + StemmerTokenFilterLanguage language = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("language"u8)) + { + language = new StemmerTokenFilterLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new StemmerTokenFilter(odataType, name, serializedAdditionalRawData, language); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(StemmerTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + StemmerTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStemmerTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(StemmerTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new StemmerTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStemmerTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilter.cs new file mode 100644 index 000000000000..a9b3f3be056a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilter.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Language specific stemming filter. This token filter is implemented using + /// Apache Lucene. + /// + public partial class StemmerTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// The language to use. + /// is null. + public StemmerTokenFilter(string name, StemmerTokenFilterLanguage language) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.StemmerTokenFilter"; + Language = language; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The language to use. + internal StemmerTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, StemmerTokenFilterLanguage language) : base(odataType, name, serializedAdditionalRawData) + { + Language = language; + } + + /// Initializes a new instance of for deserialization. + internal StemmerTokenFilter() + { + } + + /// The language to use. + public StemmerTokenFilterLanguage Language { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilterLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilterLanguage.cs new file mode 100644 index 000000000000..e5fa3550c3e9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StemmerTokenFilterLanguage.cs @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// The language to use for a stemmer token filter. + public readonly partial struct StemmerTokenFilterLanguage : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public StemmerTokenFilterLanguage(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string ArabicValue = "arabic"; + private const string ArmenianValue = "armenian"; + private const string BasqueValue = "basque"; + private const string BrazilianValue = "brazilian"; + private const string BulgarianValue = "bulgarian"; + private const string CatalanValue = "catalan"; + private const string CzechValue = "czech"; + private const string DanishValue = "danish"; + private const string DutchValue = "dutch"; + private const string DutchKpValue = "dutchKp"; + private const string EnglishValue = "english"; + private const string LightEnglishValue = "lightEnglish"; + private const string MinimalEnglishValue = "minimalEnglish"; + private const string PossessiveEnglishValue = "possessiveEnglish"; + private const string Porter2Value = "porter2"; + private const string LovinsValue = "lovins"; + private const string FinnishValue = "finnish"; + private const string LightFinnishValue = "lightFinnish"; + private const string FrenchValue = "french"; + private const string LightFrenchValue = "lightFrench"; + private const string MinimalFrenchValue = "minimalFrench"; + private const string GalicianValue = "galician"; + private const string MinimalGalicianValue = "minimalGalician"; + private const string GermanValue = "german"; + private const string German2Value = "german2"; + private const string LightGermanValue = "lightGerman"; + private const string MinimalGermanValue = "minimalGerman"; + private const string GreekValue = "greek"; + private const string HindiValue = "hindi"; + private const string HungarianValue = "hungarian"; + private const string LightHungarianValue = "lightHungarian"; + private const string IndonesianValue = "indonesian"; + private const string IrishValue = "irish"; + private const string ItalianValue = "italian"; + private const string LightItalianValue = "lightItalian"; + private const string SoraniValue = "sorani"; + private const string LatvianValue = "latvian"; + private const string NorwegianValue = "norwegian"; + private const string LightNorwegianValue = "lightNorwegian"; + private const string MinimalNorwegianValue = "minimalNorwegian"; + private const string LightNynorskValue = "lightNynorsk"; + private const string MinimalNynorskValue = "minimalNynorsk"; + private const string PortugueseValue = "portuguese"; + private const string LightPortugueseValue = "lightPortuguese"; + private const string MinimalPortugueseValue = "minimalPortuguese"; + private const string PortugueseRslpValue = "portugueseRslp"; + private const string RomanianValue = "romanian"; + private const string RussianValue = "russian"; + private const string LightRussianValue = "lightRussian"; + private const string SpanishValue = "spanish"; + private const string LightSpanishValue = "lightSpanish"; + private const string SwedishValue = "swedish"; + private const string LightSwedishValue = "lightSwedish"; + private const string TurkishValue = "turkish"; + + /// Selects the Lucene stemming tokenizer for Arabic. 
+ public static StemmerTokenFilterLanguage Arabic { get; } = new StemmerTokenFilterLanguage(ArabicValue); + /// Selects the Lucene stemming tokenizer for Armenian. + public static StemmerTokenFilterLanguage Armenian { get; } = new StemmerTokenFilterLanguage(ArmenianValue); + /// Selects the Lucene stemming tokenizer for Basque. + public static StemmerTokenFilterLanguage Basque { get; } = new StemmerTokenFilterLanguage(BasqueValue); + /// Selects the Lucene stemming tokenizer for Portuguese (Brazil). + public static StemmerTokenFilterLanguage Brazilian { get; } = new StemmerTokenFilterLanguage(BrazilianValue); + /// Selects the Lucene stemming tokenizer for Bulgarian. + public static StemmerTokenFilterLanguage Bulgarian { get; } = new StemmerTokenFilterLanguage(BulgarianValue); + /// Selects the Lucene stemming tokenizer for Catalan. + public static StemmerTokenFilterLanguage Catalan { get; } = new StemmerTokenFilterLanguage(CatalanValue); + /// Selects the Lucene stemming tokenizer for Czech. + public static StemmerTokenFilterLanguage Czech { get; } = new StemmerTokenFilterLanguage(CzechValue); + /// Selects the Lucene stemming tokenizer for Danish. + public static StemmerTokenFilterLanguage Danish { get; } = new StemmerTokenFilterLanguage(DanishValue); + /// Selects the Lucene stemming tokenizer for Dutch. + public static StemmerTokenFilterLanguage Dutch { get; } = new StemmerTokenFilterLanguage(DutchValue); + /// + /// Selects the Lucene stemming tokenizer for Dutch that uses the Kraaij-Pohlmann + /// stemming algorithm. + /// + public static StemmerTokenFilterLanguage DutchKp { get; } = new StemmerTokenFilterLanguage(DutchKpValue); + /// Selects the Lucene stemming tokenizer for English. + public static StemmerTokenFilterLanguage English { get; } = new StemmerTokenFilterLanguage(EnglishValue); + /// Selects the Lucene stemming tokenizer for English that does light stemming. + public static StemmerTokenFilterLanguage LightEnglish { get; } = new StemmerTokenFilterLanguage(LightEnglishValue); + /// Selects the Lucene stemming tokenizer for English that does minimal stemming. + public static StemmerTokenFilterLanguage MinimalEnglish { get; } = new StemmerTokenFilterLanguage(MinimalEnglishValue); + /// + /// Selects the Lucene stemming tokenizer for English that removes trailing + /// possessives from words. + /// + public static StemmerTokenFilterLanguage PossessiveEnglish { get; } = new StemmerTokenFilterLanguage(PossessiveEnglishValue); + /// + /// Selects the Lucene stemming tokenizer for English that uses the Porter2 + /// stemming algorithm. + /// + public static StemmerTokenFilterLanguage Porter2 { get; } = new StemmerTokenFilterLanguage(Porter2Value); + /// + /// Selects the Lucene stemming tokenizer for English that uses the Lovins stemming + /// algorithm. + /// + public static StemmerTokenFilterLanguage Lovins { get; } = new StemmerTokenFilterLanguage(LovinsValue); + /// Selects the Lucene stemming tokenizer for Finnish. + public static StemmerTokenFilterLanguage Finnish { get; } = new StemmerTokenFilterLanguage(FinnishValue); + /// Selects the Lucene stemming tokenizer for Finnish that does light stemming. + public static StemmerTokenFilterLanguage LightFinnish { get; } = new StemmerTokenFilterLanguage(LightFinnishValue); + /// Selects the Lucene stemming tokenizer for French. + public static StemmerTokenFilterLanguage French { get; } = new StemmerTokenFilterLanguage(FrenchValue); + /// Selects the Lucene stemming tokenizer for French that does light stemming. 
+ public static StemmerTokenFilterLanguage LightFrench { get; } = new StemmerTokenFilterLanguage(LightFrenchValue); + /// Selects the Lucene stemming tokenizer for French that does minimal stemming. + public static StemmerTokenFilterLanguage MinimalFrench { get; } = new StemmerTokenFilterLanguage(MinimalFrenchValue); + /// Selects the Lucene stemming tokenizer for Galician. + public static StemmerTokenFilterLanguage Galician { get; } = new StemmerTokenFilterLanguage(GalicianValue); + /// Selects the Lucene stemming tokenizer for Galician that does minimal stemming. + public static StemmerTokenFilterLanguage MinimalGalician { get; } = new StemmerTokenFilterLanguage(MinimalGalicianValue); + /// Selects the Lucene stemming tokenizer for German. + public static StemmerTokenFilterLanguage German { get; } = new StemmerTokenFilterLanguage(GermanValue); + /// Selects the Lucene stemming tokenizer that uses the German variant algorithm. + public static StemmerTokenFilterLanguage German2 { get; } = new StemmerTokenFilterLanguage(German2Value); + /// Selects the Lucene stemming tokenizer for German that does light stemming. + public static StemmerTokenFilterLanguage LightGerman { get; } = new StemmerTokenFilterLanguage(LightGermanValue); + /// Selects the Lucene stemming tokenizer for German that does minimal stemming. + public static StemmerTokenFilterLanguage MinimalGerman { get; } = new StemmerTokenFilterLanguage(MinimalGermanValue); + /// Selects the Lucene stemming tokenizer for Greek. + public static StemmerTokenFilterLanguage Greek { get; } = new StemmerTokenFilterLanguage(GreekValue); + /// Selects the Lucene stemming tokenizer for Hindi. + public static StemmerTokenFilterLanguage Hindi { get; } = new StemmerTokenFilterLanguage(HindiValue); + /// Selects the Lucene stemming tokenizer for Hungarian. + public static StemmerTokenFilterLanguage Hungarian { get; } = new StemmerTokenFilterLanguage(HungarianValue); + /// Selects the Lucene stemming tokenizer for Hungarian that does light stemming. + public static StemmerTokenFilterLanguage LightHungarian { get; } = new StemmerTokenFilterLanguage(LightHungarianValue); + /// Selects the Lucene stemming tokenizer for Indonesian. + public static StemmerTokenFilterLanguage Indonesian { get; } = new StemmerTokenFilterLanguage(IndonesianValue); + /// Selects the Lucene stemming tokenizer for Irish. + public static StemmerTokenFilterLanguage Irish { get; } = new StemmerTokenFilterLanguage(IrishValue); + /// Selects the Lucene stemming tokenizer for Italian. + public static StemmerTokenFilterLanguage Italian { get; } = new StemmerTokenFilterLanguage(ItalianValue); + /// Selects the Lucene stemming tokenizer for Italian that does light stemming. + public static StemmerTokenFilterLanguage LightItalian { get; } = new StemmerTokenFilterLanguage(LightItalianValue); + /// Selects the Lucene stemming tokenizer for Sorani. + public static StemmerTokenFilterLanguage Sorani { get; } = new StemmerTokenFilterLanguage(SoraniValue); + /// Selects the Lucene stemming tokenizer for Latvian. + public static StemmerTokenFilterLanguage Latvian { get; } = new StemmerTokenFilterLanguage(LatvianValue); + /// Selects the Lucene stemming tokenizer for Norwegian (Bokmål). + public static StemmerTokenFilterLanguage Norwegian { get; } = new StemmerTokenFilterLanguage(NorwegianValue); + /// + /// Selects the Lucene stemming tokenizer for Norwegian (Bokmål) that does light + /// stemming. 
+ /// + public static StemmerTokenFilterLanguage LightNorwegian { get; } = new StemmerTokenFilterLanguage(LightNorwegianValue); + /// + /// Selects the Lucene stemming tokenizer for Norwegian (Bokmål) that does minimal + /// stemming. + /// + public static StemmerTokenFilterLanguage MinimalNorwegian { get; } = new StemmerTokenFilterLanguage(MinimalNorwegianValue); + /// + /// Selects the Lucene stemming tokenizer for Norwegian (Nynorsk) that does light + /// stemming. + /// + public static StemmerTokenFilterLanguage LightNynorsk { get; } = new StemmerTokenFilterLanguage(LightNynorskValue); + /// + /// Selects the Lucene stemming tokenizer for Norwegian (Nynorsk) that does minimal + /// stemming. + /// + public static StemmerTokenFilterLanguage MinimalNynorsk { get; } = new StemmerTokenFilterLanguage(MinimalNynorskValue); + /// Selects the Lucene stemming tokenizer for Portuguese. + public static StemmerTokenFilterLanguage Portuguese { get; } = new StemmerTokenFilterLanguage(PortugueseValue); + /// Selects the Lucene stemming tokenizer for Portuguese that does light stemming. + public static StemmerTokenFilterLanguage LightPortuguese { get; } = new StemmerTokenFilterLanguage(LightPortugueseValue); + /// Selects the Lucene stemming tokenizer for Portuguese that does minimal stemming. + public static StemmerTokenFilterLanguage MinimalPortuguese { get; } = new StemmerTokenFilterLanguage(MinimalPortugueseValue); + /// + /// Selects the Lucene stemming tokenizer for Portuguese that uses the RSLP + /// stemming algorithm. + /// + public static StemmerTokenFilterLanguage PortugueseRslp { get; } = new StemmerTokenFilterLanguage(PortugueseRslpValue); + /// Selects the Lucene stemming tokenizer for Romanian. + public static StemmerTokenFilterLanguage Romanian { get; } = new StemmerTokenFilterLanguage(RomanianValue); + /// Selects the Lucene stemming tokenizer for Russian. + public static StemmerTokenFilterLanguage Russian { get; } = new StemmerTokenFilterLanguage(RussianValue); + /// Selects the Lucene stemming tokenizer for Russian that does light stemming. + public static StemmerTokenFilterLanguage LightRussian { get; } = new StemmerTokenFilterLanguage(LightRussianValue); + /// Selects the Lucene stemming tokenizer for Spanish. + public static StemmerTokenFilterLanguage Spanish { get; } = new StemmerTokenFilterLanguage(SpanishValue); + /// Selects the Lucene stemming tokenizer for Spanish that does light stemming. + public static StemmerTokenFilterLanguage LightSpanish { get; } = new StemmerTokenFilterLanguage(LightSpanishValue); + /// Selects the Lucene stemming tokenizer for Swedish. + public static StemmerTokenFilterLanguage Swedish { get; } = new StemmerTokenFilterLanguage(SwedishValue); + /// Selects the Lucene stemming tokenizer for Swedish that does light stemming. + public static StemmerTokenFilterLanguage LightSwedish { get; } = new StemmerTokenFilterLanguage(LightSwedishValue); + /// Selects the Lucene stemming tokenizer for Turkish. + public static StemmerTokenFilterLanguage Turkish { get; } = new StemmerTokenFilterLanguage(TurkishValue); + /// Determines if two values are the same. + public static bool operator ==(StemmerTokenFilterLanguage left, StemmerTokenFilterLanguage right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(StemmerTokenFilterLanguage left, StemmerTokenFilterLanguage right) => !left.Equals(right); + /// Converts a to a . 
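// A minimal usage sketch for the extensible enum above (local variable names are illustrative):
// values convert implicitly from strings and compare case-insensitively, so values the service
// adds later still round-trip through this struct.
StemmerTokenFilterLanguage language = StemmerTokenFilterLanguage.LightEnglish;
StemmerTokenFilterLanguage roundTripped = language.ToString(); // implicit conversion from string
bool same = language == roundTripped;                          // true; equality ignores case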
+ public static implicit operator StemmerTokenFilterLanguage(string value) => new StemmerTokenFilterLanguage(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is StemmerTokenFilterLanguage other && Equals(other); + /// + public bool Equals(StemmerTokenFilterLanguage other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StopAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/StopAnalyzer.Serialization.cs new file mode 100644 index 000000000000..9f968d757555 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StopAnalyzer.Serialization.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class StopAnalyzer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StopAnalyzer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsCollectionDefined(Stopwords)) + { + writer.WritePropertyName("stopwords"u8); + writer.WriteStartArray(); + foreach (var item in Stopwords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + } + + StopAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StopAnalyzer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeStopAnalyzer(document.RootElement, options); + } + + internal static StopAnalyzer DeserializeStopAnalyzer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList stopwords = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("stopwords"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + stopwords = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new StopAnalyzer(odataType, name, serializedAdditionalRawData, stopwords ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(StopAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + StopAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStopAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(StopAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new StopAnalyzer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStopAnalyzer(document.RootElement); + } + + /// Convert into a . 
+        internal override RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StopAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/StopAnalyzer.cs
new file mode 100644
index 000000000000..584168dafda1
--- /dev/null
+++ b/sdk/search/Azure.Search.Documents/src/Generated/StopAnalyzer.cs
@@ -0,0 +1,56 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.Search.Documents
+{
+    /// <summary>
+    /// Divides text at non-letters; Applies the lowercase and stopword token filters.
+    /// This analyzer is implemented using Apache Lucene.
+    /// </summary>
+    public partial class StopAnalyzer : LexicalAnalyzer
+    {
+        /// <summary> Initializes a new instance of <see cref="StopAnalyzer"/>. </summary>
+        /// <param name="name">
+        /// The name of the analyzer. It must only contain letters, digits, spaces, dashes
+        /// or underscores, can only start and end with alphanumeric characters, and is
+        /// limited to 128 characters.
+        /// </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="name"/> is null. </exception>
+        public StopAnalyzer(string name) : base(name)
+        {
+            Argument.AssertNotNull(name, nameof(name));
+
+            OdataType = "#Microsoft.Azure.Search.StopAnalyzer";
+            Stopwords = new ChangeTrackingList<string>();
+        }
+
+        /// <summary> Initializes a new instance of <see cref="StopAnalyzer"/>. </summary>
+        /// <param name="odataType"> The discriminator for derived types. </param>
+        /// <param name="name">
+        /// The name of the analyzer. It must only contain letters, digits, spaces, dashes
+        /// or underscores, can only start and end with alphanumeric characters, and is
+        /// limited to 128 characters.
+        /// </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        /// <param name="stopwords"> A list of stopwords. </param>
+        internal StopAnalyzer(string odataType, string name, IDictionary<string, BinaryData> serializedAdditionalRawData, IList<string> stopwords) : base(odataType, name, serializedAdditionalRawData)
+        {
+            Stopwords = stopwords;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="StopAnalyzer"/> for deserialization. </summary>
+        internal StopAnalyzer()
+        {
+        }
+
+        /// <summary> A list of stopwords. </summary>
+        public IList<string> Stopwords { get; }
+    }
+}
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StopwordsList.cs b/sdk/search/Azure.Search.Documents/src/Generated/StopwordsList.cs
new file mode 100644
index 000000000000..bd2d052f7f6b
--- /dev/null
+++ b/sdk/search/Azure.Search.Documents/src/Generated/StopwordsList.cs
@@ -0,0 +1,138 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ComponentModel;
+
+namespace Azure.Search.Documents
+{
+    /// <summary> Identifies a predefined list of language-specific stopwords. </summary>
+    public readonly partial struct StopwordsList : IEquatable<StopwordsList>
+    {
+        private readonly string _value;
+
+        /// <summary> Initializes a new instance of <see cref="StopwordsList"/>. </summary>
+        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
+        public StopwordsList(string value)
+        {
+            _value = value ??
throw new ArgumentNullException(nameof(value)); + } + + private const string ArabicValue = "arabic"; + private const string ArmenianValue = "armenian"; + private const string BasqueValue = "basque"; + private const string BrazilianValue = "brazilian"; + private const string BulgarianValue = "bulgarian"; + private const string CatalanValue = "catalan"; + private const string CzechValue = "czech"; + private const string DanishValue = "danish"; + private const string DutchValue = "dutch"; + private const string EnglishValue = "english"; + private const string FinnishValue = "finnish"; + private const string FrenchValue = "french"; + private const string GalicianValue = "galician"; + private const string GermanValue = "german"; + private const string GreekValue = "greek"; + private const string HindiValue = "hindi"; + private const string HungarianValue = "hungarian"; + private const string IndonesianValue = "indonesian"; + private const string IrishValue = "irish"; + private const string ItalianValue = "italian"; + private const string LatvianValue = "latvian"; + private const string NorwegianValue = "norwegian"; + private const string PersianValue = "persian"; + private const string PortugueseValue = "portuguese"; + private const string RomanianValue = "romanian"; + private const string RussianValue = "russian"; + private const string SoraniValue = "sorani"; + private const string SpanishValue = "spanish"; + private const string SwedishValue = "swedish"; + private const string ThaiValue = "thai"; + private const string TurkishValue = "turkish"; + + /// Selects the stopword list for Arabic. + public static StopwordsList Arabic { get; } = new StopwordsList(ArabicValue); + /// Selects the stopword list for Armenian. + public static StopwordsList Armenian { get; } = new StopwordsList(ArmenianValue); + /// Selects the stopword list for Basque. + public static StopwordsList Basque { get; } = new StopwordsList(BasqueValue); + /// Selects the stopword list for Portuguese (Brazil). + public static StopwordsList Brazilian { get; } = new StopwordsList(BrazilianValue); + /// Selects the stopword list for Bulgarian. + public static StopwordsList Bulgarian { get; } = new StopwordsList(BulgarianValue); + /// Selects the stopword list for Catalan. + public static StopwordsList Catalan { get; } = new StopwordsList(CatalanValue); + /// Selects the stopword list for Czech. + public static StopwordsList Czech { get; } = new StopwordsList(CzechValue); + /// Selects the stopword list for Danish. + public static StopwordsList Danish { get; } = new StopwordsList(DanishValue); + /// Selects the stopword list for Dutch. + public static StopwordsList Dutch { get; } = new StopwordsList(DutchValue); + /// Selects the stopword list for English. + public static StopwordsList English { get; } = new StopwordsList(EnglishValue); + /// Selects the stopword list for Finnish. + public static StopwordsList Finnish { get; } = new StopwordsList(FinnishValue); + /// Selects the stopword list for French. + public static StopwordsList French { get; } = new StopwordsList(FrenchValue); + /// Selects the stopword list for Galician. + public static StopwordsList Galician { get; } = new StopwordsList(GalicianValue); + /// Selects the stopword list for German. + public static StopwordsList German { get; } = new StopwordsList(GermanValue); + /// Selects the stopword list for Greek. + public static StopwordsList Greek { get; } = new StopwordsList(GreekValue); + /// Selects the stopword list for Hindi. 
+ public static StopwordsList Hindi { get; } = new StopwordsList(HindiValue); + /// Selects the stopword list for Hungarian. + public static StopwordsList Hungarian { get; } = new StopwordsList(HungarianValue); + /// Selects the stopword list for Indonesian. + public static StopwordsList Indonesian { get; } = new StopwordsList(IndonesianValue); + /// Selects the stopword list for Irish. + public static StopwordsList Irish { get; } = new StopwordsList(IrishValue); + /// Selects the stopword list for Italian. + public static StopwordsList Italian { get; } = new StopwordsList(ItalianValue); + /// Selects the stopword list for Latvian. + public static StopwordsList Latvian { get; } = new StopwordsList(LatvianValue); + /// Selects the stopword list for Norwegian. + public static StopwordsList Norwegian { get; } = new StopwordsList(NorwegianValue); + /// Selects the stopword list for Persian. + public static StopwordsList Persian { get; } = new StopwordsList(PersianValue); + /// Selects the stopword list for Portuguese. + public static StopwordsList Portuguese { get; } = new StopwordsList(PortugueseValue); + /// Selects the stopword list for Romanian. + public static StopwordsList Romanian { get; } = new StopwordsList(RomanianValue); + /// Selects the stopword list for Russian. + public static StopwordsList Russian { get; } = new StopwordsList(RussianValue); + /// Selects the stopword list for Sorani. + public static StopwordsList Sorani { get; } = new StopwordsList(SoraniValue); + /// Selects the stopword list for Spanish. + public static StopwordsList Spanish { get; } = new StopwordsList(SpanishValue); + /// Selects the stopword list for Swedish. + public static StopwordsList Swedish { get; } = new StopwordsList(SwedishValue); + /// Selects the stopword list for Thai. + public static StopwordsList Thai { get; } = new StopwordsList(ThaiValue); + /// Selects the stopword list for Turkish. + public static StopwordsList Turkish { get; } = new StopwordsList(TurkishValue); + /// Determines if two values are the same. + public static bool operator ==(StopwordsList left, StopwordsList right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(StopwordsList left, StopwordsList right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator StopwordsList(string value) => new StopwordsList(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is StopwordsList other && Equals(other); + /// + public bool Equals(StopwordsList other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StopwordsTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/StopwordsTokenFilter.Serialization.cs new file mode 100644 index 000000000000..5697c4684c1e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StopwordsTokenFilter.Serialization.cs @@ -0,0 +1,209 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
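// A usage sketch for the StopAnalyzer model defined above; "my-stop-analyzer" and the word
// list are illustrative values. Stopwords is a get-only property backed by a mutable list.
var stopAnalyzer = new StopAnalyzer("my-stop-analyzer");
stopAnalyzer.Stopwords.Add("the");
stopAnalyzer.Stopwords.Add("and");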
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class StopwordsTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StopwordsTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsCollectionDefined(Stopwords)) + { + writer.WritePropertyName("stopwords"u8); + writer.WriteStartArray(); + foreach (var item in Stopwords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(StopwordsList)) + { + writer.WritePropertyName("stopwordsList"u8); + writer.WriteStringValue(StopwordsList.Value.ToString()); + } + if (Optional.IsDefined(IgnoreCase)) + { + writer.WritePropertyName("ignoreCase"u8); + writer.WriteBooleanValue(IgnoreCase.Value); + } + if (Optional.IsDefined(RemoveTrailingStopWords)) + { + writer.WritePropertyName("removeTrailing"u8); + writer.WriteBooleanValue(RemoveTrailingStopWords.Value); + } + } + + StopwordsTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StopwordsTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeStopwordsTokenFilter(document.RootElement, options); + } + + internal static StopwordsTokenFilter DeserializeStopwordsTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList stopwords = default; + StopwordsList? stopwordsList = default; + bool? ignoreCase = default; + bool? 
removeTrailing = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("stopwords"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + stopwords = array; + continue; + } + if (property.NameEquals("stopwordsList"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + stopwordsList = new StopwordsList(property.Value.GetString()); + continue; + } + if (property.NameEquals("ignoreCase"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + ignoreCase = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("removeTrailing"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + removeTrailing = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new StopwordsTokenFilter( + odataType, + name, + serializedAdditionalRawData, + stopwords ?? new ChangeTrackingList(), + stopwordsList, + ignoreCase, + removeTrailing); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(StopwordsTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + StopwordsTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStopwordsTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(StopwordsTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new StopwordsTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeStopwordsTokenFilter(document.RootElement); + } + + /// Convert into a . 
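// A usage sketch for the StopwordsTokenFilter model that follows; "my-stopwords" is an
// illustrative name. Per the property documentation, Stopwords and StopwordsList cannot both
// be set, so only the predefined English list is used here.
var stopwordsFilter = new StopwordsTokenFilter("my-stopwords")
{
    StopwordsList = StopwordsList.English,
    IgnoreCase = true,
    RemoveTrailingStopWords = false
};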
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/StopwordsTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/StopwordsTokenFilter.cs new file mode 100644 index 000000000000..6b64f8e8da4c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/StopwordsTokenFilter.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Removes stop words from a token stream. This token filter is implemented using + /// Apache Lucene. + /// + public partial class StopwordsTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public StopwordsTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.StopwordsTokenFilter"; + Stopwords = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The list of stopwords. This property and the stopwords list property cannot + /// both be set. + /// + /// + /// A predefined list of stopwords to use. This property and the stopwords property + /// cannot both be set. Default is English. + /// + /// + /// A value indicating whether to ignore case. If true, all words are converted to + /// lower case first. Default is false. + /// + /// + /// A value indicating whether to ignore the last search term if it's a stop word. + /// Default is true. + /// + internal StopwordsTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList stopwords, StopwordsList? stopwordsList, bool? ignoreCase, bool? removeTrailingStopWords) : base(odataType, name, serializedAdditionalRawData) + { + Stopwords = stopwords; + StopwordsList = stopwordsList; + IgnoreCase = ignoreCase; + RemoveTrailingStopWords = removeTrailingStopWords; + } + + /// Initializes a new instance of for deserialization. + internal StopwordsTokenFilter() + { + } + + /// + /// The list of stopwords. This property and the stopwords list property cannot + /// both be set. + /// + public IList Stopwords { get; } + /// + /// A predefined list of stopwords to use. This property and the stopwords property + /// cannot both be set. Default is English. + /// + public StopwordsList? StopwordsList { get; set; } + /// + /// A value indicating whether to ignore case. If true, all words are converted to + /// lower case first. Default is false. + /// + public bool? IgnoreCase { get; set; } + /// + /// A value indicating whether to ignore the last search term if it's a stop word. + /// Default is true. + /// + public bool? 
RemoveTrailingStopWords { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SuggestDocumentsResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SuggestDocumentsResult.Serialization.cs new file mode 100644 index 000000000000..8b9fb5142358 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SuggestDocumentsResult.Serialization.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + internal partial class SuggestDocumentsResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SuggestDocumentsResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (var item in Results) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(Coverage)) + { + writer.WritePropertyName("@search.coverage"u8); + writer.WriteNumberValue(Coverage.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SuggestDocumentsResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SuggestDocumentsResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSuggestDocumentsResult(document.RootElement, options); + } + + internal static SuggestDocumentsResult DeserializeSuggestDocumentsResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList value = default; + double? 
searchCoverage = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(SuggestResult.DeserializeSuggestResult(item, options)); + } + value = array; + continue; + } + if (property.NameEquals("@search.coverage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchCoverage = property.Value.GetDouble(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SuggestDocumentsResult(value, searchCoverage, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SuggestDocumentsResult)} does not support writing '{options.Format}' format."); + } + } + + SuggestDocumentsResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSuggestDocumentsResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SuggestDocumentsResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SuggestDocumentsResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSuggestDocumentsResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SuggestDocumentsResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/SuggestDocumentsResult.cs new file mode 100644 index 000000000000..aeff080191d0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SuggestDocumentsResult.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents.Models +{ + /// Response containing suggestion query results from an index. + internal partial class SuggestDocumentsResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The sequence of results returned by the query. + /// is null. + internal SuggestDocumentsResult(IEnumerable results) + { + Argument.AssertNotNull(results, nameof(results)); + + Results = results.ToList(); + } + + /// Initializes a new instance of . + /// The sequence of results returned by the query. + /// + /// A value indicating the percentage of the index that was included in the query, + /// or null if minimumCoverage was not set in the request. + /// + /// Keeps track of any properties unknown to the library. + internal SuggestDocumentsResult(IReadOnlyList results, double? coverage, IDictionary serializedAdditionalRawData) + { + Results = results; + Coverage = coverage; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SuggestDocumentsResult() + { + } + + /// The sequence of results returned by the query. + public IReadOnlyList Results { get; } + /// + /// A value indicating the percentage of the index that was included in the query, + /// or null if minimumCoverage was not set in the request. + /// + public double? Coverage { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SuggestOptions.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SuggestOptions.Serialization.cs new file mode 100644 index 000000000000..81d1296c39ab --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SuggestOptions.Serialization.cs @@ -0,0 +1,273 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SuggestOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SuggestOptions)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Filter)) + { + writer.WritePropertyName("filter"u8); + writer.WriteStringValue(Filter); + } + if (Optional.IsDefined(UseFuzzyMatching)) + { + writer.WritePropertyName("fuzzy"u8); + writer.WriteBooleanValue(UseFuzzyMatching.Value); + } + if (Optional.IsDefined(HighlightPostTag)) + { + writer.WritePropertyName("highlightPostTag"u8); + writer.WriteStringValue(HighlightPostTag); + } + if (Optional.IsDefined(HighlightPreTag)) + { + writer.WritePropertyName("highlightPreTag"u8); + writer.WriteStringValue(HighlightPreTag); + } + if (Optional.IsDefined(MinimumCoverage)) + { + writer.WritePropertyName("minimumCoverage"u8); + writer.WriteNumberValue(MinimumCoverage.Value); + } + if (Optional.IsDefined(OrderByRaw)) + { + writer.WritePropertyName("orderby"u8); + writer.WriteStringValue(OrderByRaw); + } + writer.WritePropertyName("search"u8); + writer.WriteStringValue(SearchText); + if (Optional.IsDefined(SearchFieldsRaw)) + { + writer.WritePropertyName("searchFields"u8); + writer.WriteStringValue(SearchFieldsRaw); + } + if (Optional.IsDefined(SelectRaw)) + { + writer.WritePropertyName("select"u8); + writer.WriteStringValue(SelectRaw); + } + writer.WritePropertyName("suggesterName"u8); + writer.WriteStringValue(SuggesterName); + if (Optional.IsDefined(Size)) + { + writer.WritePropertyName("top"u8); + writer.WriteNumberValue(Size.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SuggestOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SuggestOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSuggestOptions(document.RootElement, options); + } + + internal static SuggestOptions DeserializeSuggestOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string filter = default; + bool? fuzzy = default; + string highlightPostTag = default; + string highlightPreTag = default; + double? minimumCoverage = default; + string orderby = default; + string search = default; + string searchFields = default; + string select = default; + string suggesterName = default; + int? 
top = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("filter"u8)) + { + filter = property.Value.GetString(); + continue; + } + if (property.NameEquals("fuzzy"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + fuzzy = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("highlightPostTag"u8)) + { + highlightPostTag = property.Value.GetString(); + continue; + } + if (property.NameEquals("highlightPreTag"u8)) + { + highlightPreTag = property.Value.GetString(); + continue; + } + if (property.NameEquals("minimumCoverage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + minimumCoverage = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("orderby"u8)) + { + orderby = property.Value.GetString(); + continue; + } + if (property.NameEquals("search"u8)) + { + search = property.Value.GetString(); + continue; + } + if (property.NameEquals("searchFields"u8)) + { + searchFields = property.Value.GetString(); + continue; + } + if (property.NameEquals("select"u8)) + { + select = property.Value.GetString(); + continue; + } + if (property.NameEquals("suggesterName"u8)) + { + suggesterName = property.Value.GetString(); + continue; + } + if (property.NameEquals("top"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + top = property.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SuggestOptions( + filter, + fuzzy, + highlightPostTag, + highlightPreTag, + minimumCoverage, + orderby, + search, + searchFields, + select, + suggesterName, + top, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SuggestOptions)} does not support writing '{options.Format}' format."); + } + } + + SuggestOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSuggestOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SuggestOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SuggestOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSuggestOptions(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SuggestOptions.cs b/sdk/search/Azure.Search.Documents/src/Generated/SuggestOptions.cs new file mode 100644 index 000000000000..e86ec5a47252 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SuggestOptions.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Parameters for filtering, sorting, fuzzy matching, and other suggestions query + /// behaviors. + /// + public partial class SuggestOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// An OData expression that filters the documents considered for suggestions. + /// + /// A value indicating whether to use fuzzy matching for the suggestion query. + /// Default is false. When set to true, the query will find suggestions even if + /// there's a substituted or missing character in the search text. While this + /// provides a better experience in some scenarios, it comes at a performance cost + /// as fuzzy suggestion searches are slower and consume more resources. + /// + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting of suggestions is disabled. + /// + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a suggestion query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + /// + /// The comma-separated list of OData $orderby expressions by which to sort the + /// results. Each expression can be either a field name or a call to either the + /// geo.distance() or the search.score() functions. Each expression can be followed + /// by asc to indicate ascending, or desc to indicate descending. The default is + /// ascending order. Ties will be broken by the match scores of documents. If no + /// $orderby is specified, the default sort order is descending by document match + /// score. There can be at most 32 $orderby clauses. + /// + /// + /// The search text to use to suggest documents. Must be at least 1 character, and + /// no more than 100 characters. 
+ /// + /// + /// The comma-separated list of field names to search for the specified search + /// text. Target fields must be included in the specified suggester. + /// + /// + /// The comma-separated list of fields to retrieve. If unspecified, only the key + /// field will be included in the results. + /// + /// + /// The name of the suggester as specified in the suggesters collection that's part + /// of the index definition. + /// + /// + /// The number of suggestions to retrieve. This must be a value between 1 and 100. + /// The default is 5. + /// + /// Keeps track of any properties unknown to the library. + internal SuggestOptions(string filter, bool? useFuzzyMatching, string highlightPostTag, string highlightPreTag, double? minimumCoverage, string orderByRaw, string searchText, string searchFieldsRaw, string selectRaw, string suggesterName, int? size, IDictionary serializedAdditionalRawData) + { + Filter = filter; + UseFuzzyMatching = useFuzzyMatching; + HighlightPostTag = highlightPostTag; + HighlightPreTag = highlightPreTag; + MinimumCoverage = minimumCoverage; + OrderByRaw = orderByRaw; + SearchText = searchText; + SearchFieldsRaw = searchFieldsRaw; + SelectRaw = selectRaw; + SuggesterName = suggesterName; + Size = size; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + /// + /// A value indicating whether to use fuzzy matching for the suggestion query. + /// Default is false. When set to true, the query will find suggestions even if + /// there's a substituted or missing character in the search text. While this + /// provides a better experience in some scenarios, it comes at a performance cost + /// as fuzzy suggestion searches are slower and consume more resources. + /// + public bool? UseFuzzyMatching { get; set; } + /// + /// A string tag that is appended to hit highlights. Must be set with + /// highlightPreTag. If omitted, hit highlighting of suggestions is disabled. + /// + public string HighlightPostTag { get; set; } + /// + /// A string tag that is prepended to hit highlights. Must be set with + /// highlightPostTag. If omitted, hit highlighting of suggestions is disabled. + /// + public string HighlightPreTag { get; set; } + /// + /// A number between 0 and 100 indicating the percentage of the index that must be + /// covered by a suggestion query in order for the query to be reported as a + /// success. This parameter can be useful for ensuring search availability even for + /// services with only one replica. The default is 80. + /// + public double? MinimumCoverage { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SuggestResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SuggestResult.Serialization.cs new file mode 100644 index 000000000000..1974a0064de8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SuggestResult.Serialization.cs @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
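// A usage sketch for the SuggestOptions model defined above. Only properties that are public
// and settable in this generated partial are shown; the parameterless constructor is an
// assumption based on the existing hand-written partial of the public API.
var suggestOptions = new SuggestOptions
{
    UseFuzzyMatching = true,
    MinimumCoverage = 80,
    HighlightPreTag = "<b>",
    HighlightPostTag = "</b>"
};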
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents.Models +{ + internal partial class SuggestResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SuggestResult)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@search.text"u8); + writer.WriteStringValue(Text); + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + SuggestResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SuggestResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSuggestResult(document.RootElement, options); + } + + internal static SuggestResult DeserializeSuggestResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string searchText = default; + IReadOnlyDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@search.text"u8)) + { + searchText = property.Value.GetString(); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SuggestResult(searchText, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SuggestResult)} does not support writing '{options.Format}' format."); + } + } + + SuggestResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSuggestResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SuggestResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SuggestResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSuggestResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SuggestResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/SuggestResult.cs new file mode 100644 index 000000000000..8824e037ef06 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SuggestResult.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents.Models +{ + /// + /// A result containing a document found by a suggestion query, plus associated + /// metadata. + /// + internal partial class SuggestResult + { + /// Initializes a new instance of . + /// The text of the suggestion result. + /// is null. + internal SuggestResult(string text) + { + Argument.AssertNotNull(text, nameof(text)); + + Text = text; + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The text of the suggestion result. + /// Additional Properties. + internal SuggestResult(string text, IReadOnlyDictionary additionalProperties) + { + Text = text; + AdditionalProperties = additionalProperties; + } + + /// Initializes a new instance of for deserialization. + internal SuggestResult() + { + } + + /// The text of the suggestion result. + public string Text { get; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IReadOnlyDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SynonymMap.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SynonymMap.Serialization.cs new file mode 100644 index 000000000000..f09ac7052311 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SynonymMap.Serialization.cs @@ -0,0 +1,190 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SynonymMap : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SynonymMap)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("format"u8); + writer.WriteStringValue(Format.ToString()); + writer.WritePropertyName("synonyms"u8); + writer.WriteStringValue(Synonyms); + if (Optional.IsDefined(EncryptionKey)) + { + writer.WritePropertyName("encryptionKey"u8); + writer.WriteObjectValue(EncryptionKey, options); + } + if (Optional.IsDefined(ETag)) + { + writer.WritePropertyName("@odata.etag"u8); + writer.WriteStringValue(ETag); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + SynonymMap IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SynonymMap)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSynonymMap(document.RootElement, options); + } + + internal static SynonymMap DeserializeSynonymMap(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + SynonymMapFormat format = default; + string synonyms = default; + SearchResourceEncryptionKey encryptionKey = default; + string odataEtag = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("format"u8)) + { + format = new SynonymMapFormat(property.Value.GetString()); + continue; + } + if (property.NameEquals("synonyms"u8)) + { + synonyms = property.Value.GetString(); + continue; + } + if (property.NameEquals("encryptionKey"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encryptionKey = SearchResourceEncryptionKey.DeserializeSearchResourceEncryptionKey(property.Value, options); + continue; + } + if (property.NameEquals("@odata.etag"u8)) + { + odataEtag = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SynonymMap( + name, + format, + synonyms, + encryptionKey, + odataEtag, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SynonymMap)} does not support writing '{options.Format}' format."); + } + } + + SynonymMap IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSynonymMap(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SynonymMap)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static SynonymMap FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSynonymMap(document.RootElement); + } + + /// Convert into a . 
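// Illustrative sketch (not part of the generated diff): the IJsonModel/IPersistableModel
// surface implemented above can be driven through System.ClientModel's ModelReaderWriter.
// Assumes the public SynonymMap(string name, string synonyms) constructor introduced in this
// change; the map name and rule are made up for the example.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

internal static class SynonymMapSerializationSketch
{
    internal static SynonymMap RoundTrip()
    {
        var map = new SynonymMap("hotel-synonyms", "inn, lodge, motel => hotel");

        // "J" requests the JSON format that the Write/Create implementations above handle.
        BinaryData json = ModelReaderWriter.Write(map, new ModelReaderWriterOptions("J"));

        // Rehydrate the model from the serialized payload.
        return ModelReaderWriter.Read<SynonymMap>(json, new ModelReaderWriterOptions("J"));
    }
}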
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SynonymMap.cs b/sdk/search/Azure.Search.Documents/src/Generated/SynonymMap.cs new file mode 100644 index 000000000000..cf7ad9084df0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SynonymMap.cs @@ -0,0 +1,124 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Represents a synonym map definition. + public partial class SynonymMap + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name of the synonym map. + /// + /// A series of synonym rules in the specified synonym map format. The rules must + /// be separated by newlines. + /// + /// or is null. + public SynonymMap(string name, string synonyms) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(synonyms, nameof(synonyms)); + + Name = name; + Synonyms = synonyms; + } + + /// Initializes a new instance of . + /// The name of the synonym map. + /// The format of the synonym map. Only the 'solr' format is currently supported. + /// + /// A series of synonym rules in the specified synonym map format. The rules must + /// be separated by newlines. + /// + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your data when + /// you want full assurance that no one, not even Microsoft, can decrypt your data. + /// Once you have encrypted your data, it will always remain encrypted. The search + /// service will ignore attempts to set this property to null. You can change this + /// property as needed if you want to rotate your encryption key; Your data will be + /// unaffected. Encryption with customer-managed keys is not available for free + /// search services, and is only available for paid services created on or after + /// January 1, 2019. + /// + /// The ETag of the synonym map. + /// Keeps track of any properties unknown to the library. + internal SynonymMap(string name, SynonymMapFormat format, string synonyms, SearchResourceEncryptionKey encryptionKey, string eTag, IDictionary serializedAdditionalRawData) + { + Name = name; + Format = format; + Synonyms = synonyms; + EncryptionKey = encryptionKey; + ETag = eTag; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal SynonymMap() + { + } + + /// The name of the synonym map. 
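// Illustrative sketch (not part of the generated diff): constructing the SynonymMap model
// defined above. Rules use the 'solr' format and are separated by newlines, as described in
// the constructor documentation; the name and rules here are made up for the example.
using Azure.Search.Documents;

var synonymMap = new SynonymMap(
    name: "hotel-synonyms",
    synonyms: string.Join("\n",
        "inn, lodge, motel => hotel",   // explicit mapping rule
        "cheap, budget, economical"));  // equivalence rule

// Format defaults to SynonymMapFormat.Solr via the Format property declared just below.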
+ public string Name { get; set; } + /// The format of the synonym map. Only the 'solr' format is currently supported. + public SynonymMapFormat Format { get; } = SynonymMapFormat.Solr; + + /// + /// A series of synonym rules in the specified synonym map format. The rules must + /// be separated by newlines. + /// + public string Synonyms { get; set; } + /// + /// A description of an encryption key that you create in Azure Key Vault. This key + /// is used to provide an additional level of encryption-at-rest for your data when + /// you want full assurance that no one, not even Microsoft, can decrypt your data. + /// Once you have encrypted your data, it will always remain encrypted. The search + /// service will ignore attempts to set this property to null. You can change this + /// property as needed if you want to rotate your encryption key; Your data will be + /// unaffected. Encryption with customer-managed keys is not available for free + /// search services, and is only available for paid services created on or after + /// January 1, 2019. + /// + public SearchResourceEncryptionKey EncryptionKey { get; set; } + /// The ETag of the synonym map. + public string ETag { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SynonymMaps.cs b/sdk/search/Azure.Search.Documents/src/Generated/SynonymMaps.cs new file mode 100644 index 000000000000..c7aed74c362f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SynonymMaps.cs @@ -0,0 +1,695 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.Search.Documents +{ + // Data plane generated sub-client. + /// The SynonymMaps sub-client. + public partial class SynonymMaps + { + private const string AuthorizationHeader = "api-key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://search.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of SynonymMaps for mocking. + protected SynonymMaps() + { + } + + /// Initializes a new instance of SynonymMaps. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// Service host. + /// The API version to use for this operation. + internal SynonymMaps(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _apiVersion = apiVersion; + } + + /// Creates a new synonym map or updates a synonym map if it already exists. + /// The name of the synonym map. + /// The definition of the synonym map to create or update. 
+ /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> CreateOrUpdateAsync(string synonymMapName, SynonymMap synonymMap, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + Argument.AssertNotNull(synonymMap, nameof(synonymMap)); + + using RequestContent content = synonymMap.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateOrUpdateAsync(synonymMapName, content, matchConditions, context).ConfigureAwait(false); + return Response.FromValue(SynonymMap.FromResponse(response), response); + } + + /// Creates a new synonym map or updates a synonym map if it already exists. + /// The name of the synonym map. + /// The definition of the synonym map to create or update. + /// The content to send as the request conditions of the request. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response CreateOrUpdate(string synonymMapName, SynonymMap synonymMap, MatchConditions matchConditions = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + Argument.AssertNotNull(synonymMap, nameof(synonymMap)); + + using RequestContent content = synonymMap.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateOrUpdate(synonymMapName, content, matchConditions, context); + return Response.FromValue(SynonymMap.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new synonym map or updates a synonym map if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the synonym map. + /// The content to send as the body of the request. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateOrUpdateAsync(string synonymMapName, RequestContent content, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(synonymMapName, content, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new synonym map or updates a synonym map if it already exists. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the synonym map. + /// The content to send as the body of the request. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response CreateOrUpdate(string synonymMapName, RequestContent content, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.CreateOrUpdate"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateOrUpdateRequest(synonymMapName, content, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a synonym map. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the synonym map. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteAsync(string synonymMapName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(synonymMapName, matchConditions, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes a synonym map. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The name of the synonym map. + /// The content to send as the request conditions of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
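// Illustrative sketch (not part of the generated diff): calling the convenience
// CreateOrUpdateAsync overload above together with DeleteAsync, using MatchConditions for
// optimistic concurrency. How a SynonymMaps instance is obtained is not shown in this diff;
// `synonymMapsClient` is assumed to come from the parent service client.
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;

internal static class SynonymMapsUsageSketch
{
    internal static async Task CreateThenDeleteAsync(SynonymMaps synonymMapsClient)
    {
        var map = new SynonymMap("hotel-synonyms", "inn, lodge, motel => hotel");

        // If-None-Match: * makes the upsert create-only; the service rejects the request
        // when a synonym map with this name already exists.
        var createOnly = new MatchConditions { IfNoneMatch = ETag.All };
        Response<SynonymMap> created =
            await synonymMapsClient.CreateOrUpdateAsync("hotel-synonyms", map, createOnly);

        // Delete only while the server copy still matches the ETag we just observed.
        var ifUnchanged = new MatchConditions { IfMatch = new ETag(created.Value.ETag) };
        await synonymMapsClient.DeleteAsync("hotel-synonyms", ifUnchanged);
    }
}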
+ /// + public virtual Response Delete(string synonymMapName, MatchConditions matchConditions = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(synonymMapName, matchConditions, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieves a synonym map definition. + /// The name of the synonym map. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetSynonymMapAsync(string synonymMapName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetSynonymMapAsync(synonymMapName, context).ConfigureAwait(false); + return Response.FromValue(SynonymMap.FromResponse(response), response); + } + + /// Retrieves a synonym map definition. + /// The name of the synonym map. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetSynonymMap(string synonymMapName, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetSynonymMap(synonymMapName, context); + return Response.FromValue(SynonymMap.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieves a synonym map definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the synonym map. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetSynonymMapAsync(string synonymMapName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.GetSynonymMap"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSynonymMapRequest(synonymMapName, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieves a synonym map definition. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The name of the synonym map. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. 
+ /// The response returned from the service. + /// + public virtual Response GetSynonymMap(string synonymMapName, RequestContext context) + { + Argument.AssertNotNullOrEmpty(synonymMapName, nameof(synonymMapName)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.GetSynonymMap"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSynonymMapRequest(synonymMapName, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Lists all synonym maps available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual async Task> GetSynonymMapsAsync(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetSynonymMapsAsync(select, context).ConfigureAwait(false); + return Response.FromValue(ListSynonymMapsResult.FromResponse(response), response); + } + + /// Lists all synonym maps available for a search service. + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The cancellation token to use. + /// + public virtual Response GetSynonymMaps(string select = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetSynonymMaps(select, context); + return Response.FromValue(ListSynonymMapsResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Lists all synonym maps available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetSynonymMapsAsync(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.GetSynonymMaps"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSynonymMapsRequest(select, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Lists all synonym maps available for a search service. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// + /// Selects which top-level properties to retrieve. + /// Specified as a comma-separated list of JSON property names, + /// or '*' for all properties. 
The default is all properties. + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetSynonymMaps(string select, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.GetSynonymMaps"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSynonymMapsRequest(select, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new synonym map. + /// The definition of the synonym map to create. + /// The cancellation token to use. + /// is null. + /// + public virtual async Task> CreateAsync(SynonymMap synonymMap, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(synonymMap, nameof(synonymMap)); + + using RequestContent content = synonymMap.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(content, context).ConfigureAwait(false); + return Response.FromValue(SynonymMap.FromResponse(response), response); + } + + /// Creates a new synonym map. + /// The definition of the synonym map to create. + /// The cancellation token to use. + /// is null. + /// + public virtual Response Create(SynonymMap synonymMap, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(synonymMap, nameof(synonymMap)); + + using RequestContent content = synonymMap.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(content, context); + return Response.FromValue(SynonymMap.FromResponse(response), response); + } + + /// + /// [Protocol Method] Creates a new synonym map. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new synonym map. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
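// Illustrative sketch (not part of the generated diff): listing synonym maps and projecting
// only the name property via $select, as documented on GetSynonymMapsAsync above. The shape
// of ListSynonymMapsResult is not shown in this diff; the `SynonymMaps` collection property
// used below is an assumption.
using System;
using System.Threading.Tasks;
using Azure;
using Azure.Search.Documents;

internal static class ListSynonymMapsSketch
{
    internal static async Task PrintNamesAsync(SynonymMaps synonymMapsClient)
    {
        // "$select=name" asks the service to return only the top-level name property.
        Response<ListSynonymMapsResult> response =
            await synonymMapsClient.GetSynonymMapsAsync(select: "name");

        foreach (SynonymMap map in response.Value.SynonymMaps)   // assumed property name
        {
            Console.WriteLine(map.Name);
        }
    }
}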
+ /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("SynonymMaps.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateOrUpdateRequest(string synonymMapName, RequestContent content, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200201); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/synonymmaps('", false); + uri.AppendPath(synonymMapName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Prefer", "return=representation"); + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(string synonymMapName, MatchConditions matchConditions, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier204404); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/synonymmaps('", false); + uri.AppendPath(synonymMapName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + if (matchConditions != null) + { + request.Headers.Add(matchConditions); + } + return message; + } + + internal HttpMessage CreateGetSynonymMapRequest(string synonymMapName, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/synonymmaps('", false); + uri.AppendPath(synonymMapName, true); + uri.AppendPath("')", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetSynonymMapsRequest(string select, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/synonymmaps", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (select != null) + { + uri.AppendQuery("$select", select, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier201); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/synonymmaps", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + 
request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200201; + private static ResponseClassifier ResponseClassifier200201 => _responseClassifier200201 ??= new StatusCodeClassifier(stackalloc ushort[] { 200, 201 }); + private static ResponseClassifier _responseClassifier204404; + private static ResponseClassifier ResponseClassifier204404 => _responseClassifier204404 ??= new StatusCodeClassifier(stackalloc ushort[] { 204, 404 }); + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier201; + private static ResponseClassifier ResponseClassifier201 => _responseClassifier201 ??= new StatusCodeClassifier(stackalloc ushort[] { 201 }); + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SynonymMapsRestClient.cs b/sdk/search/Azure.Search.Documents/src/Generated/SynonymMapsRestClient.cs deleted file mode 100644 index 8f59b4b04278..000000000000 --- a/sdk/search/Azure.Search.Documents/src/Generated/SynonymMapsRestClient.cs +++ /dev/null @@ -1,419 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. - -// - -#nullable disable - -using System; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Search.Documents.Indexes.Models; - -namespace Azure.Search.Documents -{ - internal partial class SynonymMapsRestClient - { - private readonly HttpPipeline _pipeline; - private readonly string _endpoint; - private readonly Guid? _xMsClientRequestId; - private readonly string _apiVersion; - - /// The ClientDiagnostics is used to provide tracing support for the client library. - internal ClientDiagnostics ClientDiagnostics { get; } - - /// Initializes a new instance of SynonymMapsRestClient. - /// The handler for diagnostic messaging in the client. - /// The HTTP pipeline for sending and receiving REST requests and responses. - /// The endpoint URL of the search service. - /// The tracking ID sent with the request to help with debugging. - /// Api Version. - /// , , or is null. - public SynonymMapsRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, Guid? xMsClientRequestId = null, string apiVersion = "2024-11-01-preview") - { - ClientDiagnostics = clientDiagnostics ?? throw new ArgumentNullException(nameof(clientDiagnostics)); - _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); - _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); - _xMsClientRequestId = xMsClientRequestId; - _apiVersion = apiVersion ?? 
throw new ArgumentNullException(nameof(apiVersion)); - } - - internal HttpMessage CreateCreateOrUpdateRequest(string synonymMapName, SynonymMap synonymMap, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Put; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/synonymmaps('", false); - uri.AppendPath(synonymMapName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Prefer", "return=representation"); - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(synonymMap); - request.Content = content; - return message; - } - - /// Creates a new synonym map or updates a synonym map if it already exists. - /// The name of the synonym map to create or update. - /// The definition of the synonym map to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// or is null. - public async Task> CreateOrUpdateAsync(string synonymMapName, SynonymMap synonymMap, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (synonymMapName == null) - { - throw new ArgumentNullException(nameof(synonymMapName)); - } - if (synonymMap == null) - { - throw new ArgumentNullException(nameof(synonymMap)); - } - - using var message = CreateCreateOrUpdateRequest(synonymMapName, synonymMap, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - case 201: - { - SynonymMap value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SynonymMap.DeserializeSynonymMap(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new synonym map or updates a synonym map if it already exists. - /// The name of the synonym map to create or update. - /// The definition of the synonym map to create or update. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// or is null. 
- public Response CreateOrUpdate(string synonymMapName, SynonymMap synonymMap, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (synonymMapName == null) - { - throw new ArgumentNullException(nameof(synonymMapName)); - } - if (synonymMap == null) - { - throw new ArgumentNullException(nameof(synonymMap)); - } - - using var message = CreateCreateOrUpdateRequest(synonymMapName, synonymMap, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - case 201: - { - SynonymMap value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SynonymMap.DeserializeSynonymMap(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateDeleteRequest(string synonymMapName, string ifMatch, string ifNoneMatch) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Delete; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/synonymmaps('", false); - uri.AppendPath(synonymMapName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - if (ifMatch != null) - { - request.Headers.Add("If-Match", ifMatch); - } - if (ifNoneMatch != null) - { - request.Headers.Add("If-None-Match", ifNoneMatch); - } - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Deletes a synonym map. - /// The name of the synonym map to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. - public async Task DeleteAsync(string synonymMapName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (synonymMapName == null) - { - throw new ArgumentNullException(nameof(synonymMapName)); - } - - using var message = CreateDeleteRequest(synonymMapName, ifMatch, ifNoneMatch); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - /// Deletes a synonym map. - /// The name of the synonym map to delete. - /// Defines the If-Match condition. The operation will be performed only if the ETag on the server matches this value. - /// Defines the If-None-Match condition. The operation will be performed only if the ETag on the server does not match this value. - /// The cancellation token to use. - /// is null. 
- public Response Delete(string synonymMapName, string ifMatch = null, string ifNoneMatch = null, CancellationToken cancellationToken = default) - { - if (synonymMapName == null) - { - throw new ArgumentNullException(nameof(synonymMapName)); - } - - using var message = CreateDeleteRequest(synonymMapName, ifMatch, ifNoneMatch); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 204: - case 404: - return message.Response; - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateGetRequest(string synonymMapName) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/synonymmaps('", false); - uri.AppendPath(synonymMapName, true); - uri.AppendPath("')", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Retrieves a synonym map definition. - /// The name of the synonym map to retrieve. - /// The cancellation token to use. - /// is null. - public async Task> GetAsync(string synonymMapName, CancellationToken cancellationToken = default) - { - if (synonymMapName == null) - { - throw new ArgumentNullException(nameof(synonymMapName)); - } - - using var message = CreateGetRequest(synonymMapName); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - SynonymMap value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SynonymMap.DeserializeSynonymMap(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Retrieves a synonym map definition. - /// The name of the synonym map to retrieve. - /// The cancellation token to use. - /// is null. - public Response Get(string synonymMapName, CancellationToken cancellationToken = default) - { - if (synonymMapName == null) - { - throw new ArgumentNullException(nameof(synonymMapName)); - } - - using var message = CreateGetRequest(synonymMapName); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - SynonymMap value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SynonymMap.DeserializeSynonymMap(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateListRequest(string select) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Get; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/synonymmaps", false); - if (select != null) - { - uri.AppendQuery("$select", select, true); - } - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - return message; - } - - /// Lists all synonym maps available for a search service. - /// Selects which top-level properties of the synonym maps to retrieve. 
Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public async Task> ListAsync(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 200: - { - ListSynonymMapsResult value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = ListSynonymMapsResult.DeserializeListSynonymMapsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Lists all synonym maps available for a search service. - /// Selects which top-level properties of the synonym maps to retrieve. Specified as a comma-separated list of JSON property names, or '*' for all properties. The default is all properties. - /// The cancellation token to use. - public Response List(string select = null, CancellationToken cancellationToken = default) - { - using var message = CreateListRequest(select); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 200: - { - ListSynonymMapsResult value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = ListSynonymMapsResult.DeserializeListSynonymMapsResult(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - internal HttpMessage CreateCreateRequest(SynonymMap synonymMap) - { - var message = _pipeline.CreateMessage(); - var request = message.Request; - request.Method = RequestMethod.Post; - var uri = new RawRequestUriBuilder(); - uri.AppendRaw(_endpoint, false); - uri.AppendPath("/synonymmaps", false); - uri.AppendQuery("api-version", _apiVersion, true); - request.Uri = uri; - request.Headers.Add("Accept", "application/json; odata.metadata=minimal"); - request.Headers.Add("Content-Type", "application/json"); - var content = new Utf8JsonRequestContent(); - content.JsonWriter.WriteObjectValue(synonymMap); - request.Content = content; - return message; - } - - /// Creates a new synonym map. - /// The definition of the synonym map to create. - /// The cancellation token to use. - /// is null. - public async Task> CreateAsync(SynonymMap synonymMap, CancellationToken cancellationToken = default) - { - if (synonymMap == null) - { - throw new ArgumentNullException(nameof(synonymMap)); - } - - using var message = CreateCreateRequest(synonymMap); - await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); - switch (message.Response.Status) - { - case 201: - { - SynonymMap value = default; - using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions, cancellationToken).ConfigureAwait(false); - value = SynonymMap.DeserializeSynonymMap(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - - /// Creates a new synonym map. - /// The definition of the synonym map to create. - /// The cancellation token to use. - /// is null. 
- public Response Create(SynonymMap synonymMap, CancellationToken cancellationToken = default) - { - if (synonymMap == null) - { - throw new ArgumentNullException(nameof(synonymMap)); - } - - using var message = CreateCreateRequest(synonymMap); - _pipeline.Send(message, cancellationToken); - switch (message.Response.Status) - { - case 201: - { - SynonymMap value = default; - using var document = JsonDocument.Parse(message.Response.ContentStream, ModelSerializationExtensions.JsonDocumentOptions); - value = SynonymMap.DeserializeSynonymMap(document.RootElement); - return Response.FromValue(value, message.Response); - } - default: - throw new RequestFailedException(message.Response); - } - } - } -} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SynonymTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/SynonymTokenFilter.Serialization.cs new file mode 100644 index 000000000000..dba326438a27 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SynonymTokenFilter.Serialization.cs @@ -0,0 +1,186 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class SynonymTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SynonymTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("synonyms"u8); + writer.WriteStartArray(); + foreach (var item in Synonyms) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(IgnoreCase)) + { + writer.WritePropertyName("ignoreCase"u8); + writer.WriteBooleanValue(IgnoreCase.Value); + } + if (Optional.IsDefined(Expand)) + { + writer.WritePropertyName("expand"u8); + writer.WriteBooleanValue(Expand.Value); + } + } + + SynonymTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SynonymTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSynonymTokenFilter(document.RootElement, options); + } + + internal static SynonymTokenFilter DeserializeSynonymTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList synonyms = default; + bool? ignoreCase = default; + bool? 
expand = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("synonyms"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + synonyms = array; + continue; + } + if (property.NameEquals("ignoreCase"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + ignoreCase = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("expand"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + expand = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new SynonymTokenFilter( + odataType, + name, + serializedAdditionalRawData, + synonyms, + ignoreCase, + expand); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SynonymTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + SynonymTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSynonymTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SynonymTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new SynonymTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSynonymTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/SynonymTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/SynonymTokenFilter.cs new file mode 100644 index 000000000000..e980be26120a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/SynonymTokenFilter.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// Matches single or multi-word synonyms in a token stream. This token filter is + /// implemented using Apache Lucene. + /// + public partial class SynonymTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// + /// A list of synonyms in following one of two formats: 1. incredible, + /// unbelievable, fabulous => amazing - all terms on the left side of => symbol + /// will be replaced with all terms on its right side; 2. incredible, unbelievable, + /// fabulous, amazing - comma separated list of equivalent words. Set the expand + /// option to change how this list is interpreted. + /// + /// or is null. + public SynonymTokenFilter(string name, IEnumerable synonyms) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(synonyms, nameof(synonyms)); + + OdataType = "#Microsoft.Azure.Search.SynonymTokenFilter"; + Synonyms = synonyms.ToList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// A list of synonyms in following one of two formats: 1. incredible, + /// unbelievable, fabulous => amazing - all terms on the left side of => symbol + /// will be replaced with all terms on its right side; 2. incredible, unbelievable, + /// fabulous, amazing - comma separated list of equivalent words. Set the expand + /// option to change how this list is interpreted. + /// + /// A value indicating whether to case-fold input for matching. Default is false. + /// + /// A value indicating whether all words in the list of synonyms (if => notation is + /// not used) will map to one another. If true, all words in the list of synonyms + /// (if => notation is not used) will map to one another. The following list: + /// incredible, unbelievable, fabulous, amazing is equivalent to: incredible, + /// unbelievable, fabulous, amazing => incredible, unbelievable, fabulous, amazing. + /// If false, the following list: incredible, unbelievable, fabulous, amazing will + /// be equivalent to: incredible, unbelievable, fabulous, amazing => incredible. + /// Default is true. + /// + internal SynonymTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, IList synonyms, bool? ignoreCase, bool? expand) : base(odataType, name, serializedAdditionalRawData) + { + Synonyms = synonyms; + IgnoreCase = ignoreCase; + Expand = expand; + } + + /// Initializes a new instance of for deserialization. + internal SynonymTokenFilter() + { + } + + /// + /// A list of synonyms in following one of two formats: 1. incredible, + /// unbelievable, fabulous => amazing - all terms on the left side of => symbol + /// will be replaced with all terms on its right side; 2. incredible, unbelievable, + /// fabulous, amazing - comma separated list of equivalent words. Set the expand + /// option to change how this list is interpreted. 
+ /// + public IList Synonyms { get; } + /// A value indicating whether to case-fold input for matching. Default is false. + public bool? IgnoreCase { get; set; } + /// + /// A value indicating whether all words in the list of synonyms (if => notation is + /// not used) will map to one another. If true, all words in the list of synonyms + /// (if => notation is not used) will map to one another. The following list: + /// incredible, unbelievable, fabulous, amazing is equivalent to: incredible, + /// unbelievable, fabulous, amazing => incredible, unbelievable, fabulous, amazing. + /// If false, the following list: incredible, unbelievable, fabulous, amazing will + /// be equivalent to: incredible, unbelievable, fabulous, amazing => incredible. + /// Default is true. + /// + public bool? Expand { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TagScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringFunction.Serialization.cs new file mode 100644 index 000000000000..3fff49f2021b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringFunction.Serialization.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class TagScoringFunction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TagScoringFunction)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("tag"u8); + writer.WriteObjectValue(Parameters, options); + } + + TagScoringFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TagScoringFunction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTagScoringFunction(document.RootElement, options); + } + + internal static TagScoringFunction DeserializeTagScoringFunction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TagScoringParameters tag = default; + string fieldName = default; + double boost = default; + ScoringFunctionInterpolation? 
interpolation = default; + string type = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tag"u8)) + { + tag = TagScoringParameters.DeserializeTagScoringParameters(property.Value, options); + continue; + } + if (property.NameEquals("fieldName"u8)) + { + fieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("boost"u8)) + { + boost = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("interpolation"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + interpolation = new ScoringFunctionInterpolation(property.Value.GetString()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new TagScoringFunction( + fieldName, + boost, + interpolation, + type, + serializedAdditionalRawData, + tag); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TagScoringFunction)} does not support writing '{options.Format}' format."); + } + } + + TagScoringFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTagScoringFunction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TagScoringFunction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new TagScoringFunction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTagScoringFunction(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TagScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringFunction.cs new file mode 100644 index 000000000000..06b243b1800a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringFunction.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines a function that boosts scores of documents with string values matching + /// a given list of tags. 
+ /// + public partial class TagScoringFunction : ScoringFunction + { + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// Parameter values for the tag scoring function. + /// or is null. + public TagScoringFunction(string fieldName, double boost, TagScoringParameters parameters) : base(fieldName, boost) + { + Argument.AssertNotNull(fieldName, nameof(fieldName)); + Argument.AssertNotNull(parameters, nameof(parameters)); + + Type = "tag"; + Parameters = parameters; + } + + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// + /// A value indicating how boosting will be interpolated across document scores; + /// defaults to "Linear". + /// + /// Type of ScoringFunction. + /// Keeps track of any properties unknown to the library. + /// Parameter values for the tag scoring function. + internal TagScoringFunction(string fieldName, double boost, ScoringFunctionInterpolation? interpolation, string type, IDictionary serializedAdditionalRawData, TagScoringParameters parameters) : base(fieldName, boost, interpolation, type, serializedAdditionalRawData) + { + Parameters = parameters; + } + + /// Initializes a new instance of for deserialization. + internal TagScoringFunction() + { + } + + /// Parameter values for the tag scoring function. + public TagScoringParameters Parameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TagScoringParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringParameters.Serialization.cs new file mode 100644 index 000000000000..87587b8958e4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringParameters.Serialization.cs @@ -0,0 +1,142 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class TagScoringParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TagScoringParameters)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("tagsParameter"u8); + writer.WriteStringValue(TagsParameter); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + TagScoringParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TagScoringParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTagScoringParameters(document.RootElement, options); + } + + internal static TagScoringParameters DeserializeTagScoringParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string tagsParameter = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("tagsParameter"u8)) + { + tagsParameter = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new TagScoringParameters(tagsParameter, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TagScoringParameters)} does not support writing '{options.Format}' format."); + } + } + + TagScoringParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTagScoringParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TagScoringParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static TagScoringParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTagScoringParameters(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TagScoringParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringParameters.cs new file mode 100644 index 000000000000..b7bbcd0aade1 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TagScoringParameters.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Provides parameter values to a tag scoring function. + public partial class TagScoringParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the parameter passed in search queries to specify the list of tags + /// to compare against the target field. + /// + /// is null. + public TagScoringParameters(string tagsParameter) + { + Argument.AssertNotNull(tagsParameter, nameof(tagsParameter)); + + TagsParameter = tagsParameter; + } + + /// Initializes a new instance of . + /// + /// The name of the parameter passed in search queries to specify the list of tags + /// to compare against the target field. + /// + /// Keeps track of any properties unknown to the library. + internal TagScoringParameters(string tagsParameter, IDictionary serializedAdditionalRawData) + { + TagsParameter = tagsParameter; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal TagScoringParameters() + { + } + + /// + /// The name of the parameter passed in search queries to specify the list of tags + /// to compare against the target field. + /// + public string TagsParameter { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TextResult.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextResult.Serialization.cs new file mode 100644 index 000000000000..74d8b8eaf5bc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextResult.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
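A minimal sketch of how the tag scoring types above fit together (the field name and parameter name are hypothetical; only the constructors shown in this diff are used):

    using Azure.Search.Documents;

    // Boost documents whose "tags" field matches the tags supplied at query time.
    var tagBoost = new TagScoringFunction(
        fieldName: "tags",   // searchable field holding the tag values
        boost: 2.0,          // must be positive and not equal to 1.0
        parameters: new TagScoringParameters("tagsParameter"));
    // "tagsParameter" is the name a caller references at query time to supply
    // the actual list of tags to compare against the "tags" field.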
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class TextResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextResult)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(SearchScore)) + { + writer.WritePropertyName("searchScore"u8); + writer.WriteNumberValue(SearchScore.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + TextResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTextResult(document.RootElement, options); + } + + internal static TextResult DeserializeTextResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double? searchScore = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("searchScore"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + searchScore = property.Value.GetDouble(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new TextResult(searchScore, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TextResult)} does not support writing '{options.Format}' format."); + } + } + + TextResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTextResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TextResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static TextResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTextResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TextResult.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextResult.cs new file mode 100644 index 000000000000..07f0e0b04d95 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextResult.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// The BM25 or Classic score for the text portion of the query. + public partial class TextResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal TextResult() + { + } + + /// Initializes a new instance of . + /// The BM25 or Classic score for the text portion of the query. + /// Keeps track of any properties unknown to the library. + internal TextResult(double? searchScore, IDictionary serializedAdditionalRawData) + { + SearchScore = searchScore; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The BM25 or Classic score for the text portion of the query. + public double? 
SearchScore { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextSplitMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextSplitMode.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/TextSplitMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/TextSplitMode.cs index e56753573b1a..0d4fa9f6490d 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextSplitMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextSplitMode.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// A value indicating which split mode to perform. public readonly partial struct TextSplitMode : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkill.Serialization.cs new file mode 100644 index 000000000000..c511d992d421 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkill.Serialization.cs @@ -0,0 +1,214 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class TextTranslationSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextTranslationSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("defaultToLanguageCode"u8); + writer.WriteStringValue(DefaultToLanguageCode.ToString()); + if (Optional.IsDefined(DefaultFromLanguageCode)) + { + writer.WritePropertyName("defaultFromLanguageCode"u8); + writer.WriteStringValue(DefaultFromLanguageCode.Value.ToString()); + } + if (Optional.IsDefined(SuggestedFrom)) + { + writer.WritePropertyName("suggestedFrom"u8); + writer.WriteStringValue(SuggestedFrom.Value.ToString()); + } + } + + TextTranslationSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextTranslationSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTextTranslationSkill(document.RootElement, options); + } + + internal static TextTranslationSkill DeserializeTextTranslationSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TextTranslationSkillLanguage defaultToLanguageCode = default; + TextTranslationSkillLanguage? defaultFromLanguageCode = default; + TextTranslationSkillLanguage? suggestedFrom = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("defaultToLanguageCode"u8)) + { + defaultToLanguageCode = new TextTranslationSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("defaultFromLanguageCode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultFromLanguageCode = new TextTranslationSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("suggestedFrom"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + suggestedFrom = new TextTranslationSkillLanguage(property.Value.GetString()); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new TextTranslationSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + defaultToLanguageCode, + defaultFromLanguageCode, + suggestedFrom); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TextTranslationSkill)} does not support writing '{options.Format}' format."); + } + } + + TextTranslationSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTextTranslationSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TextTranslationSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new TextTranslationSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTextTranslationSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkill.cs new file mode 100644 index 000000000000..95b9575c2ab5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkill.cs @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// A skill to translate text from one language to another. + public partial class TextTranslationSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// + /// The language code to translate documents into for documents that don't specify + /// the to language explicitly. + /// + /// or is null. + public TextTranslationSkill(IEnumerable inputs, IEnumerable outputs, TextTranslationSkillLanguage defaultToLanguageCode) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + + OdataType = "#Microsoft.Skills.Text.TranslationSkill"; + DefaultToLanguageCode = defaultToLanguageCode; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. 
+ /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The language code to translate documents into for documents that don't specify + /// the to language explicitly. + /// + /// + /// The language code to translate documents from for documents that don't specify + /// the from language explicitly. + /// + /// + /// The language code to translate documents from when neither the fromLanguageCode + /// input nor the defaultFromLanguageCode parameter are provided, and the automatic + /// language detection is unsuccessful. Default is `en`. + /// + internal TextTranslationSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, TextTranslationSkillLanguage defaultToLanguageCode, TextTranslationSkillLanguage? defaultFromLanguageCode, TextTranslationSkillLanguage? suggestedFrom) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + DefaultToLanguageCode = defaultToLanguageCode; + DefaultFromLanguageCode = defaultFromLanguageCode; + SuggestedFrom = suggestedFrom; + } + + /// Initializes a new instance of for deserialization. + internal TextTranslationSkill() + { + } + + /// + /// The language code to translate documents into for documents that don't specify + /// the to language explicitly. + /// + public TextTranslationSkillLanguage DefaultToLanguageCode { get; set; } + /// + /// The language code to translate documents from for documents that don't specify + /// the from language explicitly. + /// + public TextTranslationSkillLanguage? DefaultFromLanguageCode { get; set; } + /// + /// The language code to translate documents from when neither the fromLanguageCode + /// input nor the defaultFromLanguageCode parameter are provided, and the automatic + /// language detection is unsuccessful. Default is `en`. + /// + public TextTranslationSkillLanguage? SuggestedFrom { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkillLanguage.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkillLanguage.cs similarity index 99% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkillLanguage.cs rename to sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkillLanguage.cs index c3f0166ee1fc..64ed4c6930e3 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/TextTranslationSkillLanguage.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextTranslationSkillLanguage.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The language codes supported for input text by TextTranslationSkill. 
public readonly partial struct TextTranslationSkillLanguage : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TextWeights.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextWeights.Serialization.cs new file mode 100644 index 000000000000..10a0837cc143 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextWeights.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class TextWeights : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextWeights)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("weights"u8); + writer.WriteStartObject(); + foreach (var item in Weights) + { + writer.WritePropertyName(item.Key); + writer.WriteNumberValue(item.Value); + } + writer.WriteEndObject(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + TextWeights IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextWeights)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTextWeights(document.RootElement, options); + } + + internal static TextWeights DeserializeTextWeights(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary weights = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("weights"u8)) + { + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetDouble()); + } + weights = dictionary; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new TextWeights(weights, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TextWeights)} does not support writing '{options.Format}' format."); + } + } + + TextWeights IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTextWeights(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TextWeights)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static TextWeights FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTextWeights(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TextWeights.cs b/sdk/search/Azure.Search.Documents/src/Generated/TextWeights.cs new file mode 100644 index 000000000000..d2fab6b89ad8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TextWeights.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
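A minimal sketch of building the TextWeights model defined below, whose "weights" dictionary the serializer above writes as a single JSON object keyed by field name (the field names and weight values are illustrative):

    using System.Collections.Generic;
    using Azure.Search.Documents;

    // Per-field boosts: keys are index field names, values are their weights.
    var textWeights = new TextWeights(new Dictionary<string, double>
    {
        ["hotelName"] = 5.0,
        ["description"] = 1.5
    });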
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Defines weights on index fields for which matches should boost scoring in + /// search queries. + /// + public partial class TextWeights + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The dictionary of per-field weights to boost document scoring. The keys are + /// field names and the values are the weights for each field. + /// + /// is null. + public TextWeights(IDictionary weights) + { + Argument.AssertNotNull(weights, nameof(weights)); + + Weights = weights; + } + + /// Initializes a new instance of . + /// + /// The dictionary of per-field weights to boost document scoring. The keys are + /// field names and the values are the weights for each field. + /// + /// Keeps track of any properties unknown to the library. + internal TextWeights(IDictionary weights, IDictionary serializedAdditionalRawData) + { + Weights = weights; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal TextWeights() + { + } + + /// + /// The dictionary of per-field weights to boost document scoring. The keys are + /// field names and the values are the weights for each field. + /// + public IDictionary Weights { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TokenCharacterKind.cs b/sdk/search/Azure.Search.Documents/src/Generated/TokenCharacterKind.cs new file mode 100644 index 000000000000..0108f0d23d1a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TokenCharacterKind.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Represents classes of characters on which a token filter can operate. + public readonly partial struct TokenCharacterKind : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public TokenCharacterKind(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string LetterValue = "letter"; + private const string DigitValue = "digit"; + private const string WhitespaceValue = "whitespace"; + private const string PunctuationValue = "punctuation"; + private const string SymbolValue = "symbol"; + + /// Keeps letters in tokens. + public static TokenCharacterKind Letter { get; } = new TokenCharacterKind(LetterValue); + /// Keeps digits in tokens. + public static TokenCharacterKind Digit { get; } = new TokenCharacterKind(DigitValue); + /// Keeps whitespace in tokens. 
+ public static TokenCharacterKind Whitespace { get; } = new TokenCharacterKind(WhitespaceValue); + /// Keeps punctuation in tokens. + public static TokenCharacterKind Punctuation { get; } = new TokenCharacterKind(PunctuationValue); + /// Keeps symbols in tokens. + public static TokenCharacterKind Symbol { get; } = new TokenCharacterKind(SymbolValue); + /// Determines if two values are the same. + public static bool operator ==(TokenCharacterKind left, TokenCharacterKind right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(TokenCharacterKind left, TokenCharacterKind right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator TokenCharacterKind(string value) => new TokenCharacterKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is TokenCharacterKind other && Equals(other); + /// + public bool Equals(TokenCharacterKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/TokenFilter.Serialization.cs new file mode 100644 index 000000000000..456c79339cd9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TokenFilter.Serialization.cs @@ -0,0 +1,158 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownTokenFilter))] + public partial class TokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TokenFilter)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("@odata.type"u8); + writer.WriteStringValue(OdataType); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + TokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTokenFilter(document.RootElement, options); + } + + internal static TokenFilter DeserializeTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("@odata.type", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "#Microsoft.Azure.Search.AsciiFoldingTokenFilter": return AsciiFoldingTokenFilter.DeserializeAsciiFoldingTokenFilter(element, options); + case "#Microsoft.Azure.Search.CjkBigramTokenFilter": return CjkBigramTokenFilter.DeserializeCjkBigramTokenFilter(element, options); + case "#Microsoft.Azure.Search.CommonGramTokenFilter": return CommonGramTokenFilter.DeserializeCommonGramTokenFilter(element, options); + case "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter": return DictionaryDecompounderTokenFilter.DeserializeDictionaryDecompounderTokenFilter(element, options); + case "#Microsoft.Azure.Search.EdgeNGramTokenFilterV2": return Search.Documents.Indexes.Models.EdgeNGramTokenFilter.DeserializeEdgeNGramTokenFilter(element, options); + case "#Microsoft.Azure.Search.ElisionTokenFilter": return ElisionTokenFilter.DeserializeElisionTokenFilter(element, options); + case "#Microsoft.Azure.Search.KeepTokenFilter": return KeepTokenFilter.DeserializeKeepTokenFilter(element, options); + case "#Microsoft.Azure.Search.KeywordMarkerTokenFilter": return KeywordMarkerTokenFilter.DeserializeKeywordMarkerTokenFilter(element, options); + case "#Microsoft.Azure.Search.LengthTokenFilter": return LengthTokenFilter.DeserializeLengthTokenFilter(element, options); + case "#Microsoft.Azure.Search.LimitTokenFilter": return LimitTokenFilter.DeserializeLimitTokenFilter(element, options); + case "#Microsoft.Azure.Search.NGramTokenFilterV2": return Search.Documents.Indexes.Models.NGramTokenFilter.DeserializeNGramTokenFilter(element, options); + case "#Microsoft.Azure.Search.PatternCaptureTokenFilter": return PatternCaptureTokenFilter.DeserializePatternCaptureTokenFilter(element, options); + case "#Microsoft.Azure.Search.PatternReplaceTokenFilter": return PatternReplaceTokenFilter.DeserializePatternReplaceTokenFilter(element, options); + case 
"#Microsoft.Azure.Search.PhoneticTokenFilter": return PhoneticTokenFilter.DeserializePhoneticTokenFilter(element, options); + case "#Microsoft.Azure.Search.ShingleTokenFilter": return ShingleTokenFilter.DeserializeShingleTokenFilter(element, options); + case "#Microsoft.Azure.Search.SnowballTokenFilter": return SnowballTokenFilter.DeserializeSnowballTokenFilter(element, options); + case "#Microsoft.Azure.Search.StemmerOverrideTokenFilter": return StemmerOverrideTokenFilter.DeserializeStemmerOverrideTokenFilter(element, options); + case "#Microsoft.Azure.Search.StemmerTokenFilter": return StemmerTokenFilter.DeserializeStemmerTokenFilter(element, options); + case "#Microsoft.Azure.Search.StopwordsTokenFilter": return StopwordsTokenFilter.DeserializeStopwordsTokenFilter(element, options); + case "#Microsoft.Azure.Search.SynonymTokenFilter": return SynonymTokenFilter.DeserializeSynonymTokenFilter(element, options); + case "#Microsoft.Azure.Search.TruncateTokenFilter": return TruncateTokenFilter.DeserializeTruncateTokenFilter(element, options); + case "#Microsoft.Azure.Search.UniqueTokenFilter": return UniqueTokenFilter.DeserializeUniqueTokenFilter(element, options); + case "#Microsoft.Azure.Search.WordDelimiterTokenFilter": return WordDelimiterTokenFilter.DeserializeWordDelimiterTokenFilter(element, options); + } + } + return UnknownTokenFilter.DeserializeUnknownTokenFilter(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TokenFilter)} does not support writing '{options.Format}' format."); + } + } + + TokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static TokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTokenFilter(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/TokenFilter.cs new file mode 100644 index 000000000000..6807af104daa --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TokenFilter.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents +{ + /// + /// Base type for token filters. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , and . + /// + public abstract partial class TokenFilter + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + protected TokenFilter(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal TokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData) + { + OdataType = odataType; + Name = name; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal TokenFilter() + { + } + + /// The discriminator for derived types. + internal string OdataType { get; set; } + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + public string Name { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TokenFilterName.cs b/sdk/search/Azure.Search.Documents/src/Generated/TokenFilterName.cs new file mode 100644 index 000000000000..255cbf9fdfbe --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TokenFilterName.cs @@ -0,0 +1,261 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// Defines the names of all token filters supported by the search engine. + public readonly partial struct TokenFilterName : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public TokenFilterName(string value) + { + _value = value ?? 
throw new ArgumentNullException(nameof(value)); + } + + private const string ArabicNormalizationValue = "arabic_normalization"; + private const string ApostropheValue = "apostrophe"; + private const string AsciiFoldingValue = "asciifolding"; + private const string CjkBigramValue = "cjk_bigram"; + private const string CjkWidthValue = "cjk_width"; + private const string ClassicValue = "classic"; + private const string CommonGramValue = "common_grams"; + private const string EdgeNGramValue = "edgeNGram_v2"; + private const string ElisionValue = "elision"; + private const string GermanNormalizationValue = "german_normalization"; + private const string HindiNormalizationValue = "hindi_normalization"; + private const string IndicNormalizationValue = "indic_normalization"; + private const string KeywordRepeatValue = "keyword_repeat"; + private const string KStemValue = "kstem"; + private const string LengthValue = "length"; + private const string LimitValue = "limit"; + private const string LowercaseValue = "lowercase"; + private const string NGramValue = "nGram_v2"; + private const string PersianNormalizationValue = "persian_normalization"; + private const string PhoneticValue = "phonetic"; + private const string PorterStemValue = "porter_stem"; + private const string ReverseValue = "reverse"; + private const string ScandinavianNormalizationValue = "scandinavian_normalization"; + private const string ScandinavianFoldingNormalizationValue = "scandinavian_folding"; + private const string ShingleValue = "shingle"; + private const string SnowballValue = "snowball"; + private const string SoraniNormalizationValue = "sorani_normalization"; + private const string StemmerValue = "stemmer"; + private const string StopwordsValue = "stopwords"; + private const string TrimValue = "trim"; + private const string TruncateValue = "truncate"; + private const string UniqueValue = "unique"; + private const string UppercaseValue = "uppercase"; + private const string WordDelimiterValue = "word_delimiter"; + + /// + /// A token filter that applies the Arabic normalizer to normalize the orthography. + /// See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ar/ArabicNormalizationFilter.html + /// + public static TokenFilterName ArabicNormalization { get; } = new TokenFilterName(ArabicNormalizationValue); + /// + /// Strips all characters after an apostrophe (including the apostrophe itself). + /// See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/tr/ApostropheFilter.html + /// + public static TokenFilterName Apostrophe { get; } = new TokenFilterName(ApostropheValue); + /// + /// Converts alphabetic, numeric, and symbolic Unicode characters which are not in + /// the first 127 ASCII characters (the "Basic Latin" Unicode block) into their + /// ASCII equivalents, if such equivalents exist. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html + /// + public static TokenFilterName AsciiFolding { get; } = new TokenFilterName(AsciiFoldingValue); + /// + /// Forms bigrams of CJK terms that are generated from the standard tokenizer. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKBigramFilter.html + /// + public static TokenFilterName CjkBigram { get; } = new TokenFilterName(CjkBigramValue); + /// + /// Normalizes CJK width differences. 
Folds full-width ASCII variants into the + /// equivalent basic Latin, and half-width Katakana variants into the equivalent + /// Kana. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKWidthFilter.html + /// + public static TokenFilterName CjkWidth { get; } = new TokenFilterName(CjkWidthValue); + /// + /// Removes English possessives, and dots from acronyms. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicFilter.html + /// + public static TokenFilterName Classic { get; } = new TokenFilterName(ClassicValue); + /// + /// Construct bigrams for frequently occurring terms while indexing. Single terms + /// are still indexed too, with bigrams overlaid. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/commongrams/CommonGramsFilter.html + /// + public static TokenFilterName CommonGram { get; } = new TokenFilterName(CommonGramValue); + /// + /// Generates n-grams of the given size(s) starting from the front or the back of + /// an input token. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.html + /// + public static TokenFilterName EdgeNGram { get; } = new TokenFilterName(EdgeNGramValue); + /// + /// Removes elisions. For example, "l'avion" (the plane) will be converted to + /// "avion" (plane). See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html + /// + public static TokenFilterName Elision { get; } = new TokenFilterName(ElisionValue); + /// + /// Normalizes German characters according to the heuristics of the German2 + /// snowball algorithm. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/de/GermanNormalizationFilter.html + /// + public static TokenFilterName GermanNormalization { get; } = new TokenFilterName(GermanNormalizationValue); + /// + /// Normalizes text in Hindi to remove some differences in spelling variations. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/hi/HindiNormalizationFilter.html + /// + public static TokenFilterName HindiNormalization { get; } = new TokenFilterName(HindiNormalizationValue); + /// + /// Normalizes the Unicode representation of text in Indian languages. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/in/IndicNormalizationFilter.html + /// + public static TokenFilterName IndicNormalization { get; } = new TokenFilterName(IndicNormalizationValue); + /// + /// Emits each incoming token twice, once as keyword and once as non-keyword. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/KeywordRepeatFilter.html + /// + public static TokenFilterName KeywordRepeat { get; } = new TokenFilterName(KeywordRepeatValue); + /// + /// A high-performance kstem filter for English. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/en/KStemFilter.html + /// + public static TokenFilterName KStem { get; } = new TokenFilterName(KStemValue); + /// + /// Removes words that are too long or too short. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LengthFilter.html + /// + public static TokenFilterName Length { get; } = new TokenFilterName(LengthValue); + /// + /// Limits the number of tokens while indexing. 
See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LimitTokenCountFilter.html + /// + public static TokenFilterName Limit { get; } = new TokenFilterName(LimitValue); + /// + /// Normalizes token text to lower case. See + /// https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html + /// + public static TokenFilterName Lowercase { get; } = new TokenFilterName(LowercaseValue); + /// + /// Generates n-grams of the given size(s). See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenFilter.html + /// + public static TokenFilterName NGram { get; } = new TokenFilterName(NGramValue); + /// + /// Applies normalization for Persian. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/fa/PersianNormalizationFilter.html + /// + public static TokenFilterName PersianNormalization { get; } = new TokenFilterName(PersianNormalizationValue); + /// + /// Create tokens for phonetic matches. See + /// https://lucene.apache.org/core/4_10_3/analyzers-phonetic/org/apache/lucene/analysis/phonetic/package-tree.html + /// + public static TokenFilterName Phonetic { get; } = new TokenFilterName(PhoneticValue); + /// + /// Uses the Porter stemming algorithm to transform the token stream. See + /// http://tartarus.org/~martin/PorterStemmer + /// + public static TokenFilterName PorterStem { get; } = new TokenFilterName(PorterStemValue); + /// + /// Reverses the token string. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html + /// + public static TokenFilterName Reverse { get; } = new TokenFilterName(ReverseValue); + /// + /// Normalizes use of the interchangeable Scandinavian characters. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianNormalizationFilter.html + /// + public static TokenFilterName ScandinavianNormalization { get; } = new TokenFilterName(ScandinavianNormalizationValue); + /// + /// Folds Scandinavian characters åÅäæÄÆ->a and öÖøØ->o. It also + /// discriminates against use of double vowels aa, ae, ao, oe and oo, leaving just + /// the first one. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianFoldingFilter.html + /// + public static TokenFilterName ScandinavianFoldingNormalization { get; } = new TokenFilterName(ScandinavianFoldingNormalizationValue); + /// + /// Creates combinations of tokens as a single token. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/shingle/ShingleFilter.html + /// + public static TokenFilterName Shingle { get; } = new TokenFilterName(ShingleValue); + /// + /// A filter that stems words using a Snowball-generated stemmer. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/snowball/SnowballFilter.html + /// + public static TokenFilterName Snowball { get; } = new TokenFilterName(SnowballValue); + /// + /// Normalizes the Unicode representation of Sorani text. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ckb/SoraniNormalizationFilter.html + /// + public static TokenFilterName SoraniNormalization { get; } = new TokenFilterName(SoraniNormalizationValue); + /// + /// Language specific stemming filter. 
See + /// https://learn.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#TokenFilters + /// + public static TokenFilterName Stemmer { get; } = new TokenFilterName(StemmerValue); + /// + /// Removes stop words from a token stream. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopFilter.html + /// + public static TokenFilterName Stopwords { get; } = new TokenFilterName(StopwordsValue); + /// + /// Trims leading and trailing whitespace from tokens. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TrimFilter.html + /// + public static TokenFilterName Trim { get; } = new TokenFilterName(TrimValue); + /// + /// Truncates the terms to a specific length. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilter.html + /// + public static TokenFilterName Truncate { get; } = new TokenFilterName(TruncateValue); + /// + /// Filters out tokens with same text as the previous token. See + /// http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/RemoveDuplicatesTokenFilter.html + /// + public static TokenFilterName Unique { get; } = new TokenFilterName(UniqueValue); + /// + /// Normalizes token text to upper case. See + /// https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html + /// + public static TokenFilterName Uppercase { get; } = new TokenFilterName(UppercaseValue); + /// + /// Splits words into subwords and performs optional transformations on subword + /// groups. + /// + public static TokenFilterName WordDelimiter { get; } = new TokenFilterName(WordDelimiterValue); + /// Determines if two values are the same. + public static bool operator ==(TokenFilterName left, TokenFilterName right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(TokenFilterName left, TokenFilterName right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator TokenFilterName(string value) => new TokenFilterName(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is TokenFilterName other && Equals(other); + /// + public bool Equals(TokenFilterName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TruncateTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/TruncateTokenFilter.Serialization.cs new file mode 100644 index 000000000000..8e9c6bff6be9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TruncateTokenFilter.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
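// Illustrative sketch (not part of the generated sources): TokenFilterName above follows the
// extensible-enum pattern used throughout this library. Values convert implicitly from strings
// and compare case-insensitively, so the following (hypothetical variable names) both refer to
// the built-in lowercase filter:
//
//   TokenFilterName fromString = "LOWERCASE";             // implicit string conversion
//   bool same = fromString == TokenFilterName.Lowercase;  // true: InvariantCultureIgnoreCase
//
// Unknown values are not rejected, which lets clients use filter names added to the service
// after this library version shipped.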
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class TruncateTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TruncateTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Length)) + { + writer.WritePropertyName("length"u8); + writer.WriteNumberValue(Length.Value); + } + } + + TruncateTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TruncateTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTruncateTokenFilter(document.RootElement, options); + } + + internal static TruncateTokenFilter DeserializeTruncateTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? length = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("length"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + length = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new TruncateTokenFilter(odataType, name, serializedAdditionalRawData, length); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TruncateTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + TruncateTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTruncateTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TruncateTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new TruncateTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTruncateTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/TruncateTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/TruncateTokenFilter.cs new file mode 100644 index 000000000000..1d8c5c064fe9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/TruncateTokenFilter.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Truncates the terms to a specific length. This token filter is implemented + /// using Apache Lucene. + /// + public partial class TruncateTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public TruncateTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.TruncateTokenFilter"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// The length at which terms will be truncated. Default and maximum is 300. + internal TruncateTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, int? length) : base(odataType, name, serializedAdditionalRawData) + { + Length = length; + } + + /// Initializes a new instance of for deserialization. + internal TruncateTokenFilter() + { + } + + /// The length at which terms will be truncated. Default and maximum is 300. + public int? 
Length { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UaxUrlEmailTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UaxUrlEmailTokenizer.Serialization.cs new file mode 100644 index 000000000000..2490da750555 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UaxUrlEmailTokenizer.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class UaxUrlEmailTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UaxUrlEmailTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(MaxTokenLength)) + { + writer.WritePropertyName("maxTokenLength"u8); + writer.WriteNumberValue(MaxTokenLength.Value); + } + } + + UaxUrlEmailTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UaxUrlEmailTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeUaxUrlEmailTokenizer(document.RootElement, options); + } + + internal static UaxUrlEmailTokenizer DeserializeUaxUrlEmailTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? 
maxTokenLength = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("maxTokenLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxTokenLength = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UaxUrlEmailTokenizer(odataType, name, serializedAdditionalRawData, maxTokenLength); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(UaxUrlEmailTokenizer)} does not support writing '{options.Format}' format."); + } + } + + UaxUrlEmailTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUaxUrlEmailTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(UaxUrlEmailTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UaxUrlEmailTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUaxUrlEmailTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UaxUrlEmailTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/UaxUrlEmailTokenizer.cs new file mode 100644 index 000000000000..4899bb0d2875 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UaxUrlEmailTokenizer.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Tokenizes urls and emails as one token. This tokenizer is implemented using + /// Apache Lucene. + /// + public partial class UaxUrlEmailTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// + /// The name of the tokenizer. 
It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// is null. + public UaxUrlEmailTokenizer(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.UaxUrlEmailTokenizer"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The maximum token length. Default is 255. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + internal UaxUrlEmailTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData, int? maxTokenLength) : base(odataType, name, serializedAdditionalRawData) + { + MaxTokenLength = maxTokenLength; + } + + /// Initializes a new instance of for deserialization. + internal UaxUrlEmailTokenizer() + { + } + + /// + /// The maximum token length. Default is 255. Tokens longer than the maximum length + /// are split. The maximum token length that can be used is 300 characters. + /// + public int? MaxTokenLength { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UniqueTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UniqueTokenFilter.Serialization.cs new file mode 100644 index 000000000000..beeea60230b3 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UniqueTokenFilter.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class UniqueTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UniqueTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(OnlyOnSamePosition)) + { + writer.WritePropertyName("onlyOnSamePosition"u8); + writer.WriteBooleanValue(OnlyOnSamePosition.Value); + } + } + + UniqueTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UniqueTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeUniqueTokenFilter(document.RootElement, options); + } + + internal static UniqueTokenFilter DeserializeUniqueTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool? onlyOnSamePosition = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("onlyOnSamePosition"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + onlyOnSamePosition = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UniqueTokenFilter(odataType, name, serializedAdditionalRawData, onlyOnSamePosition); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(UniqueTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + UniqueTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUniqueTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(UniqueTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UniqueTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUniqueTokenFilter(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UniqueTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/UniqueTokenFilter.cs new file mode 100644 index 000000000000..96f5a743acd8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UniqueTokenFilter.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Filters out tokens with same text as the previous token. This token filter is + /// implemented using Apache Lucene. + /// + public partial class UniqueTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public UniqueTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.UniqueTokenFilter"; + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// A value indicating whether to remove duplicates only at the same position. + /// Default is false. + /// + internal UniqueTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, bool? onlyOnSamePosition) : base(odataType, name, serializedAdditionalRawData) + { + OnlyOnSamePosition = onlyOnSamePosition; + } + + /// Initializes a new instance of for deserialization. + internal UniqueTokenFilter() + { + } + + /// + /// A value indicating whether to remove duplicates only at the same position. + /// Default is false. + /// + public bool? OnlyOnSamePosition { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownCharFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCharFilter.Serialization.cs new file mode 100644 index 000000000000..bf8be88d7353 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCharFilter.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownCharFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CharFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + CharFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CharFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCharFilter(document.RootElement, options); + } + + internal static UnknownCharFilter DeserializeUnknownCharFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownCharFilter(odataType, name, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CharFilter)} does not support writing '{options.Format}' format."); + } + } + + CharFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCharFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CharFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownCharFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownCharFilter(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownCharFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCharFilter.cs new file mode 100644 index 000000000000..19517c05162c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCharFilter.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of CharFilter. + internal partial class UnknownCharFilter : CharFilter + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal UnknownCharFilter(string odataType, string name, IDictionary serializedAdditionalRawData) : base(odataType, name, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownCharFilter() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownCognitiveServicesAccount.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCognitiveServicesAccount.Serialization.cs new file mode 100644 index 000000000000..ca45f2268fb5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCognitiveServicesAccount.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownCognitiveServicesAccount : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + CognitiveServicesAccount IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCognitiveServicesAccount(document.RootElement, options); + } + + internal static UnknownCognitiveServicesAccount DeserializeUnknownCognitiveServicesAccount(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + string description = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownCognitiveServicesAccount(odataType, description, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support writing '{options.Format}' format."); + } + } + + CognitiveServicesAccount IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCognitiveServicesAccount(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CognitiveServicesAccount)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownCognitiveServicesAccount FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownCognitiveServicesAccount(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownCognitiveServicesAccount.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCognitiveServicesAccount.cs new file mode 100644 index 000000000000..9f9841b075ad --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownCognitiveServicesAccount.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of CognitiveServicesAccount. + internal partial class UnknownCognitiveServicesAccount : CognitiveServicesAccount + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Description of the Azure AI service resource attached to a skillset. + /// Keeps track of any properties unknown to the library. + internal UnknownCognitiveServicesAccount(string odataType, string description, IDictionary serializedAdditionalRawData) : base(odataType, description, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownCognitiveServicesAccount() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataChangeDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataChangeDetectionPolicy.Serialization.cs new file mode 100644 index 000000000000..88017bde7e74 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataChangeDetectionPolicy.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownDataChangeDetectionPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + DataChangeDetectionPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDataChangeDetectionPolicy(document.RootElement, options); + } + + internal static UnknownDataChangeDetectionPolicy DeserializeUnknownDataChangeDetectionPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownDataChangeDetectionPolicy(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support writing '{options.Format}' format."); + } + } + + DataChangeDetectionPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataChangeDetectionPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DataChangeDetectionPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownDataChangeDetectionPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownDataChangeDetectionPolicy(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataChangeDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataChangeDetectionPolicy.cs new file mode 100644 index 000000000000..ae6db0f40e6b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataChangeDetectionPolicy.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
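// Illustrative sketch (not part of the generated sources): the serialization classes in this
// diff all resolve the requested format the same way. A format of "W" means "use the model's
// wire format", which GetFormatFromOptions reports as "J" (JSON) for every model here; any other
// explicit format throws FormatException. A caller going through the System.ClientModel
// ModelReaderWriter layer might, for example, persist a model with:
//
//   BinaryData data = ModelReaderWriter.Write(model, new ModelReaderWriterOptions("J"));
//
// where "model" stands in for any of the IPersistableModel implementations shown above.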
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of DataChangeDetectionPolicy. + internal partial class UnknownDataChangeDetectionPolicy : DataChangeDetectionPolicy + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal UnknownDataChangeDetectionPolicy(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownDataChangeDetectionPolicy() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataDeletionDetectionPolicy.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataDeletionDetectionPolicy.Serialization.cs new file mode 100644 index 000000000000..8aa4ba8e09c4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataDeletionDetectionPolicy.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownDataDeletionDetectionPolicy : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + DataDeletionDetectionPolicy IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDataDeletionDetectionPolicy(document.RootElement, options); + } + + internal static UnknownDataDeletionDetectionPolicy DeserializeUnknownDataDeletionDetectionPolicy(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownDataDeletionDetectionPolicy(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support writing '{options.Format}' format."); + } + } + + DataDeletionDetectionPolicy IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeDataDeletionDetectionPolicy(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DataDeletionDetectionPolicy)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownDataDeletionDetectionPolicy FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownDataDeletionDetectionPolicy(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataDeletionDetectionPolicy.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataDeletionDetectionPolicy.cs new file mode 100644 index 000000000000..320997b2df34 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownDataDeletionDetectionPolicy.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of DataDeletionDetectionPolicy. + internal partial class UnknownDataDeletionDetectionPolicy : DataDeletionDetectionPolicy + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal UnknownDataDeletionDetectionPolicy(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownDataDeletionDetectionPolicy() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalAnalyzer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalAnalyzer.Serialization.cs new file mode 100644 index 000000000000..b4061c191926 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalAnalyzer.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownLexicalAnalyzer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + LexicalAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLexicalAnalyzer(document.RootElement, options); + } + + internal static UnknownLexicalAnalyzer DeserializeUnknownLexicalAnalyzer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownLexicalAnalyzer(odataType, name, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + LexicalAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LexicalAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownLexicalAnalyzer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownLexicalAnalyzer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalAnalyzer.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalAnalyzer.cs new file mode 100644 index 000000000000..6ac9cd0f3c38 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalAnalyzer.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of LexicalAnalyzer. + internal partial class UnknownLexicalAnalyzer : LexicalAnalyzer + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal UnknownLexicalAnalyzer(string odataType, string name, IDictionary serializedAdditionalRawData) : base(odataType, name, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownLexicalAnalyzer() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalNormalizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalNormalizer.Serialization.cs new file mode 100644 index 000000000000..ada54b75001a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalNormalizer.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownLexicalNormalizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + LexicalNormalizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLexicalNormalizer(document.RootElement, options); + } + + internal static UnknownLexicalNormalizer DeserializeUnknownLexicalNormalizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownLexicalNormalizer(odataType, name, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support writing '{options.Format}' format."); + } + } + + LexicalNormalizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalNormalizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LexicalNormalizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownLexicalNormalizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownLexicalNormalizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalNormalizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalNormalizer.cs new file mode 100644 index 000000000000..bb8e949d6777 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalNormalizer.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of LexicalNormalizer. + internal partial class UnknownLexicalNormalizer : LexicalNormalizer + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the char filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal UnknownLexicalNormalizer(string odataType, string name, IDictionary serializedAdditionalRawData) : base(odataType, name, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownLexicalNormalizer() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalTokenizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalTokenizer.Serialization.cs new file mode 100644 index 000000000000..aa0cd3c40f50 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalTokenizer.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownLexicalTokenizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + LexicalTokenizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLexicalTokenizer(document.RootElement, options); + } + + internal static UnknownLexicalTokenizer DeserializeUnknownLexicalTokenizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownLexicalTokenizer(odataType, name, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support writing '{options.Format}' format."); + } + } + + LexicalTokenizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeLexicalTokenizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LexicalTokenizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownLexicalTokenizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownLexicalTokenizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalTokenizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalTokenizer.cs new file mode 100644 index 000000000000..5fa4e02e9ae5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownLexicalTokenizer.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of LexicalTokenizer. + internal partial class UnknownLexicalTokenizer : LexicalTokenizer + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes + /// or underscores, can only start and end with alphanumeric characters, and is + /// limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal UnknownLexicalTokenizer(string odataType, string name, IDictionary serializedAdditionalRawData) : base(odataType, name, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownLexicalTokenizer() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownScoringFunction.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownScoringFunction.Serialization.cs new file mode 100644 index 000000000000..67ad13c66a24 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownScoringFunction.Serialization.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownScoringFunction : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScoringFunction)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + ScoringFunction IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ScoringFunction)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeScoringFunction(document.RootElement, options); + } + + internal static UnknownScoringFunction DeserializeUnknownScoringFunction(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string fieldName = default; + double boost = default; + ScoringFunctionInterpolation? 
interpolation = default; + string type = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("fieldName"u8)) + { + fieldName = property.Value.GetString(); + continue; + } + if (property.NameEquals("boost"u8)) + { + boost = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("interpolation"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + interpolation = new ScoringFunctionInterpolation(property.Value.GetString()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownScoringFunction(fieldName, boost, interpolation, type, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ScoringFunction)} does not support writing '{options.Format}' format."); + } + } + + ScoringFunction IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeScoringFunction(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ScoringFunction)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownScoringFunction FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownScoringFunction(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownScoringFunction.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownScoringFunction.cs new file mode 100644 index 000000000000..07dc95fe3b1d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownScoringFunction.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of ScoringFunction. + internal partial class UnknownScoringFunction : ScoringFunction + { + /// Initializes a new instance of . + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. 
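DeserializeUnknownScoringFunction above is only the fallback path; the dispatch that selects it lives in the base type's deserializer, which is not part of this hunk. Under the conventions used throughout this patch it is expected to look roughly like the sketch below; the discriminator value and method name in the commented case are assumptions for illustration:

// Sketch of the expected base-type dispatch (not shown in this hunk): recognized
// "type" discriminators route to concrete subtypes, everything else falls back to
// UnknownScoringFunction so no data is lost on a round-trip.
internal static ScoringFunction DeserializeScoringFunction(JsonElement element, ModelReaderWriterOptions options = null)
{
    options ??= ModelSerializationExtensions.WireOptions;
    if (element.ValueKind == JsonValueKind.Null)
    {
        return null;
    }
    if (element.TryGetProperty("type", out JsonElement discriminator))
    {
        switch (discriminator.GetString())
        {
            // Cases for the known discriminators would appear here, e.g.:
            // case "distance": return DistanceScoringFunction.DeserializeDistanceScoringFunction(element, options);
            default:
                break;
        }
    }
    return UnknownScoringFunction.DeserializeUnknownScoringFunction(element, options);
}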
+ /// + /// A value indicating how boosting will be interpolated across document scores; + /// defaults to "Linear". + /// + /// Type of ScoringFunction. + /// Keeps track of any properties unknown to the library. + internal UnknownScoringFunction(string fieldName, double boost, ScoringFunctionInterpolation? interpolation, string type, IDictionary serializedAdditionalRawData) : base(fieldName, boost, interpolation, type, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownScoringFunction() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerDataIdentity.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerDataIdentity.Serialization.cs new file mode 100644 index 000000000000..99e32b5086f0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerDataIdentity.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownSearchIndexerDataIdentity : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + SearchIndexerDataIdentity IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerDataIdentity(document.RootElement, options); + } + + internal static UnknownSearchIndexerDataIdentity DeserializeUnknownSearchIndexerDataIdentity(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownSearchIndexerDataIdentity(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerDataIdentity IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerDataIdentity(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerDataIdentity)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownSearchIndexerDataIdentity FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownSearchIndexerDataIdentity(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerDataIdentity.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerDataIdentity.cs new file mode 100644 index 000000000000..968c4cbdbd19 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerDataIdentity.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of SearchIndexerDataIdentity. + internal partial class UnknownSearchIndexerDataIdentity : SearchIndexerDataIdentity + { + /// Initializes a new instance of . + /// A URI fragment specifying the type of identity. + /// Keeps track of any properties unknown to the library. + internal UnknownSearchIndexerDataIdentity(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownSearchIndexerDataIdentity() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerSkill.Serialization.cs new file mode 100644 index 000000000000..1d90431d6ae0 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerSkill.Serialization.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownSearchIndexerSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + SearchIndexerSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSearchIndexerSkill(document.RootElement, options); + } + + internal static UnknownSearchIndexerSkill DeserializeUnknownSearchIndexerSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownSearchIndexerSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support writing '{options.Format}' format."); + } + } + + SearchIndexerSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSearchIndexerSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SearchIndexerSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static new UnknownSearchIndexerSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownSearchIndexerSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerSkill.cs new file mode 100644 index 000000000000..0889451a4536 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSearchIndexerSkill.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of SearchIndexerSkill. + internal partial class UnknownSearchIndexerSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + internal UnknownSearchIndexerSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownSearchIndexerSkill() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownSimilarityAlgorithm.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSimilarityAlgorithm.Serialization.cs new file mode 100644 index 000000000000..73ccd77ad227 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSimilarityAlgorithm.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
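Every serialization member in this patch resolves its effective format with the same ternary: an explicit format such as "J" is honored as-is, while the wire sentinel "W" defers to GetFormatFromOptions, which returns "J" for all of these models. An equivalent standalone helper, written only to spell out that idiom (the type and method names below are invented):

using System.ClientModel.Primitives;

internal static class FormatResolution
{
    // Mirrors the ternary used throughout the generated members above: honor an
    // explicit format, otherwise ask the model for its wire default ("J" here).
    internal static string Resolve<T>(IPersistableModel<T> model, ModelReaderWriterOptions options)
        => options.Format == "W" ? model.GetFormatFromOptions(options) : options.Format;
}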
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownSimilarityAlgorithm : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + SimilarityAlgorithm IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSimilarityAlgorithm(document.RootElement, options); + } + + internal static UnknownSimilarityAlgorithm DeserializeUnknownSimilarityAlgorithm(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownSimilarityAlgorithm(odataType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support writing '{options.Format}' format."); + } + } + + SimilarityAlgorithm IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeSimilarityAlgorithm(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SimilarityAlgorithm)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownSimilarityAlgorithm FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownSimilarityAlgorithm(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownSimilarityAlgorithm.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSimilarityAlgorithm.cs new file mode 100644 index 000000000000..3b27ac4aafd7 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownSimilarityAlgorithm.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of SimilarityAlgorithm. + internal partial class UnknownSimilarityAlgorithm : SimilarityAlgorithm + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// Keeps track of any properties unknown to the library. + internal UnknownSimilarityAlgorithm(string odataType, IDictionary serializedAdditionalRawData) : base(odataType, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownSimilarityAlgorithm() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownTokenFilter.Serialization.cs new file mode 100644 index 000000000000..ab3bb16af45a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownTokenFilter.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + TokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTokenFilter(document.RootElement, options); + } + + internal static UnknownTokenFilter DeserializeUnknownTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string odataType = "Unknown"; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownTokenFilter(odataType, name, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TokenFilter)} does not support writing '{options.Format}' format."); + } + } + + TokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownTokenFilter(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownTokenFilter.cs new file mode 100644 index 000000000000..30ecd1f156f5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownTokenFilter.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of TokenFilter. + internal partial class UnknownTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + internal UnknownTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData) : base(odataType, name, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownTokenFilter() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorQuery.Serialization.cs new file mode 100644 index 000000000000..058b373d010e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorQuery.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownVectorQuery : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorQuery)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + VectorQuery IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorQuery)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorQuery(document.RootElement, options); + } + + internal static UnknownVectorQuery DeserializeUnknownVectorQuery(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? k = default; + string fields = default; + bool? exhaustive = default; + double? oversampling = default; + float? weight = default; + VectorThreshold threshold = default; + string filterOverride = default; + VectorQueryKind kind = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("k"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + k = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("fields"u8)) + { + fields = property.Value.GetString(); + continue; + } + if (property.NameEquals("exhaustive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exhaustive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("oversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + oversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("weight"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + weight = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("threshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + threshold = VectorThreshold.DeserializeVectorThreshold(property.Value, options); + continue; + } + if (property.NameEquals("filterOverride"u8)) + { + filterOverride = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorQueryKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownVectorQuery( + k, + fields, + exhaustive, + oversampling, + weight, + threshold, + filterOverride, + kind, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorQuery)} does not support writing '{options.Format}' format."); + } + } + + VectorQuery IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorQuery(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorQuery)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownVectorQuery FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownVectorQuery(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorQuery.cs new file mode 100644 index 000000000000..9cc7c8f0ac1e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorQuery.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of VectorQuery. + internal partial class UnknownVectorQuery : VectorQuery + { + /// Initializes a new instance of . + /// Number of nearest neighbors to return as top hits. + /// + /// Vector Fields of type Collection(Edm.Single) to be included in the vector + /// searched. + /// + /// + /// When true, triggers an exhaustive k-nearest neighbor search across all vectors + /// within the vector index. Useful for scenarios where exact matches are critical, + /// such as determining ground truth values. + /// + /// + /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' + /// parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' + /// is true. This parameter is only permitted when a + /// compression method is used on the underlying vector field. + /// + /// + /// Relative weight of the vector query when compared to other vector query and/or + /// the text query within the same search request. This value is used when + /// combining the results of multiple ranking lists produced by the different + /// vector queries and/or the results retrieved through the text query. The higher + /// the weight, the higher the documents that matched that query will be in the + /// final ranking. Default is 1.0 and the value needs to be a positive number + /// larger than zero. + /// + /// + /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The OData filter expression to apply to this specific vector query. 
If no + /// filter expression is defined at the vector level, the expression defined in the + /// top level filter parameter is used instead. + /// + /// Type of query. + /// Keeps track of any properties unknown to the library. + internal UnknownVectorQuery(int? kNearestNeighbors, string fields, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, VectorQueryKind kind, IDictionary serializedAdditionalRawData) : base(kNearestNeighbors, fields, exhaustive, oversampling, weight, threshold, filterOverride, kind, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownVectorQuery() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchAlgorithmConfiguration.Serialization.cs new file mode 100644 index 000000000000..d69f54ad9a0b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchAlgorithmConfiguration.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownVectorSearchAlgorithmConfiguration : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + VectorSearchAlgorithmConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearchAlgorithmConfiguration(document.RootElement, options); + } + + internal static UnknownVectorSearchAlgorithmConfiguration DeserializeUnknownVectorSearchAlgorithmConfiguration(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + VectorSearchAlgorithmKind kind = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchAlgorithmKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownVectorSearchAlgorithmConfiguration(name, kind, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support writing '{options.Format}' format."); + } + } + + VectorSearchAlgorithmConfiguration IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchAlgorithmConfiguration(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownVectorSearchAlgorithmConfiguration FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownVectorSearchAlgorithmConfiguration(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchAlgorithmConfiguration.cs new file mode 100644 index 000000000000..2f0b40f518af --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchAlgorithmConfiguration.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of VectorSearchAlgorithmConfiguration. + internal partial class UnknownVectorSearchAlgorithmConfiguration : VectorSearchAlgorithmConfiguration + { + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// Type of VectorSearchAlgorithmConfiguration. + /// Keeps track of any properties unknown to the library. + internal UnknownVectorSearchAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind, IDictionary serializedAdditionalRawData) : base(name, kind, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownVectorSearchAlgorithmConfiguration() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchCompression.Serialization.cs new file mode 100644 index 000000000000..3cceb6017cbc --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchCompression.Serialization.cs @@ -0,0 +1,179 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownVectorSearchCompression : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + VectorSearchCompression IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearchCompression(document.RootElement, options); + } + + internal static UnknownVectorSearchCompression DeserializeUnknownVectorSearchCompression(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + bool? rerankWithOriginalVectors = default; + double? defaultOversampling = default; + RescoringOptions rescoringOptions = default; + int? truncationDimension = default; + VectorSearchCompressionKind kind = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("rerankWithOriginalVectors"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rerankWithOriginalVectors = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("defaultOversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + defaultOversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("rescoringOptions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rescoringOptions = RescoringOptions.DeserializeRescoringOptions(property.Value, options); + continue; + } + if (property.NameEquals("truncationDimension"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + truncationDimension = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchCompressionKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownVectorSearchCompression( + name, + rerankWithOriginalVectors, + defaultOversampling, + rescoringOptions, + truncationDimension, + kind, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support writing '{options.Format}' format."); + } + } + + VectorSearchCompression IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchCompression(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownVectorSearchCompression FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownVectorSearchCompression(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchCompression.cs new file mode 100644 index 000000000000..f576b1166512 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchCompression.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of VectorSearchCompression. + internal partial class UnknownVectorSearchCompression : VectorSearchCompression + { + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// + /// If set to true, once the ordered set of results calculated using compressed + /// vectors are obtained, they will be reranked again by recalculating the + /// full-precision similarity scores. This will improve recall at the expense of + /// latency. + /// + /// + /// Default oversampling factor. Oversampling will internally request more + /// documents (specified by this multiplier) in the initial search. This increases + /// the set of results that will be reranked using recomputed similarity scores + /// from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). + /// This parameter can only be set when rerankWithOriginalVectors is true. Higher + /// values improve recall at the expense of latency. + /// + /// Contains the options for rescoring. + /// + /// The number of dimensions to truncate the vectors to. Truncating the vectors + /// reduces the size of the vectors and the amount of data that needs to be + /// transferred during search. This can save storage cost and improve search + /// performance at the expense of recall. It should be only used for embeddings + /// trained with Matryoshka Representation Learning (MRL) such as OpenAI + /// text-embedding-3-large (small). The default value is null, which means no + /// truncation. + /// + /// Type of VectorSearchCompression. + /// Keeps track of any properties unknown to the library. + internal UnknownVectorSearchCompression(string compressionName, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? 
truncationDimension, VectorSearchCompressionKind kind, IDictionary serializedAdditionalRawData) : base(compressionName, rerankWithOriginalVectors, defaultOversampling, rescoringOptions, truncationDimension, kind, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownVectorSearchCompression() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchVectorizer.Serialization.cs new file mode 100644 index 000000000000..e6411ef450ec --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchVectorizer.Serialization.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownVectorSearchVectorizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + VectorSearchVectorizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearchVectorizer(document.RootElement, options); + } + + internal static UnknownVectorSearchVectorizer DeserializeUnknownVectorSearchVectorizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + VectorSearchVectorizerKind kind = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchVectorizerKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownVectorSearchVectorizer(name, kind, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support writing '{options.Format}' format."); + } + } + + VectorSearchVectorizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchVectorizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownVectorSearchVectorizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownVectorSearchVectorizer(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchVectorizer.cs new file mode 100644 index 000000000000..4fe5bf00a97d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorSearchVectorizer.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of VectorSearchVectorizer. + internal partial class UnknownVectorSearchVectorizer : VectorSearchVectorizer + { + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// Type of VectorSearchVectorizer. + /// Keeps track of any properties unknown to the library. + internal UnknownVectorSearchVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, IDictionary serializedAdditionalRawData) : base(vectorizerName, kind, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownVectorSearchVectorizer() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorThreshold.Serialization.cs new file mode 100644 index 000000000000..dec649b85ab9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorThreshold.Serialization.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class UnknownVectorThreshold : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorThreshold)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + } + + VectorThreshold IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorThreshold)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorThreshold(document.RootElement, options); + } + + internal static UnknownVectorThreshold DeserializeUnknownVectorThreshold(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + VectorThresholdKind kind = "Unknown"; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("kind"u8)) + { + kind = new VectorThresholdKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UnknownVectorThreshold(kind, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorThreshold)} does not support writing '{options.Format}' format."); + } + } + + VectorThreshold IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorThreshold(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorThreshold)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new UnknownVectorThreshold FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeUnknownVectorThreshold(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorThreshold.cs new file mode 100644 index 000000000000..0acfcbb1cc10 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/UnknownVectorThreshold.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Unknown version of VectorThreshold. 
+ internal partial class UnknownVectorThreshold : VectorThreshold + { + /// Initializes a new instance of . + /// Type of threshold. + /// Keeps track of any properties unknown to the library. + internal UnknownVectorThreshold(VectorThresholdKind kind, IDictionary serializedAdditionalRawData) : base(kind, serializedAdditionalRawData) + { + } + + /// Initializes a new instance of for deserialization. + internal UnknownVectorThreshold() + { + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorEncodingFormat.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorEncodingFormat.cs similarity index 97% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorEncodingFormat.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorEncodingFormat.cs index 976fa62c6e9c..2c3928568d00 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorEncodingFormat.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorEncodingFormat.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The encoding format for interpreting vector field contents. public readonly partial struct VectorEncodingFormat : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorFilterMode.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorFilterMode.cs similarity index 83% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorFilterMode.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorFilterMode.cs index 9d5a1936f2ad..d02695d2da70 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorFilterMode.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorFilterMode.cs @@ -8,9 +8,12 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { - /// Determines whether or not filters are applied before or after the vector search is performed. + /// + /// Determines whether or not filters are applied before or after the vector search + /// is performed. + /// public readonly partial struct VectorFilterMode : IEquatable { private readonly string _value; @@ -25,7 +28,11 @@ public VectorFilterMode(string value) private const string PostFilterValue = "postFilter"; private const string PreFilterValue = "preFilter"; - /// The filter will be applied after the candidate set of vector results is returned. Depending on the filter selectivity, this can result in fewer results than requested by the parameter 'k'. + /// + /// The filter will be applied after the candidate set of vector results is + /// returned. Depending on the filter selectivity, this can result in fewer results + /// than requested by the parameter 'k'. + /// public static VectorFilterMode PostFilter { get; } = new VectorFilterMode(PostFilterValue); /// The filter will be applied before the search query. public static VectorFilterMode PreFilter { get; } = new VectorFilterMode(PreFilterValue); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorQuery.Serialization.cs new file mode 100644 index 000000000000..d7f1bde9325f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorQuery.Serialization.cs @@ -0,0 +1,171 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownVectorQuery))] + internal partial class VectorQuery : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorQuery)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(KNearestNeighbors)) + { + writer.WritePropertyName("k"u8); + writer.WriteNumberValue(KNearestNeighbors.Value); + } + if (Optional.IsDefined(Fields)) + { + writer.WritePropertyName("fields"u8); + writer.WriteStringValue(Fields); + } + if (Optional.IsDefined(Exhaustive)) + { + writer.WritePropertyName("exhaustive"u8); + writer.WriteBooleanValue(Exhaustive.Value); + } + if (Optional.IsDefined(Oversampling)) + { + writer.WritePropertyName("oversampling"u8); + writer.WriteNumberValue(Oversampling.Value); + } + if (Optional.IsDefined(Weight)) + { + writer.WritePropertyName("weight"u8); + writer.WriteNumberValue(Weight.Value); + } + if (Optional.IsDefined(Threshold)) + { + writer.WritePropertyName("threshold"u8); + writer.WriteObjectValue(Threshold, options); + } + if (Optional.IsDefined(FilterOverride)) + { + writer.WritePropertyName("filterOverride"u8); + writer.WriteStringValue(FilterOverride); + } + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorQuery IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorQuery)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorQuery(document.RootElement, options); + } + + internal static VectorQuery DeserializeVectorQuery(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "imageBinary": return VectorizableImageBinaryQuery.DeserializeVectorizableImageBinaryQuery(element, options); + case "imageUrl": return VectorizableImageUrlQuery.DeserializeVectorizableImageUrlQuery(element, options); + case "text": return VectorizableTextQuery.DeserializeVectorizableTextQuery(element, options); + case "vector": return VectorizedQuery.DeserializeVectorizedQuery(element, options); + } + } + return UnknownVectorQuery.DeserializeUnknownVectorQuery(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorQuery)} does not support writing '{options.Format}' format."); + } + } + + VectorQuery IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorQuery(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorQuery)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VectorQuery FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorQuery(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorQuery.cs new file mode 100644 index 000000000000..b8828bcb78ab --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorQuery.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The query parameters for vector and hybrid search queries. + /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + internal abstract partial class VectorQuery + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + protected VectorQuery() + { + } + + /// Initializes a new instance of . + /// Number of nearest neighbors to return as top hits. + /// + /// Vector Fields of type Collection(Edm.Single) to be included in the vector + /// searched. + /// + /// + /// When true, triggers an exhaustive k-nearest neighbor search across all vectors + /// within the vector index. Useful for scenarios where exact matches are critical, + /// such as determining ground truth values. + /// + /// + /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' + /// parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' + /// is true. This parameter is only permitted when a + /// compression method is used on the underlying vector field. + /// + /// + /// Relative weight of the vector query when compared to other vector query and/or + /// the text query within the same search request. This value is used when + /// combining the results of multiple ranking lists produced by the different + /// vector queries and/or the results retrieved through the text query. The higher + /// the weight, the higher the documents that matched that query will be in the + /// final ranking. Default is 1.0 and the value needs to be a positive number + /// larger than zero. + /// + /// + /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The OData filter expression to apply to this specific vector query. If no + /// filter expression is defined at the vector level, the expression defined in the + /// top level filter parameter is used instead. + /// + /// Type of query. + /// Keeps track of any properties unknown to the library. + internal VectorQuery(int? kNearestNeighbors, string fields, bool? exhaustive, double? oversampling, float? 
weight, VectorThreshold threshold, string filterOverride, VectorQueryKind kind, IDictionary serializedAdditionalRawData) + { + KNearestNeighbors = kNearestNeighbors; + Fields = fields; + Exhaustive = exhaustive; + Oversampling = oversampling; + Weight = weight; + Threshold = threshold; + FilterOverride = filterOverride; + Kind = kind; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Number of nearest neighbors to return as top hits. + public int? KNearestNeighbors { get; set; } + /// + /// Vector Fields of type Collection(Edm.Single) to be included in the vector + /// searched. + /// + public string Fields { get; set; } + /// + /// When true, triggers an exhaustive k-nearest neighbor search across all vectors + /// within the vector index. Useful for scenarios where exact matches are critical, + /// such as determining ground truth values. + /// + public bool? Exhaustive { get; set; } + /// + /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' + /// parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' + /// is true. This parameter is only permitted when a + /// compression method is used on the underlying vector field. + /// + public double? Oversampling { get; set; } + /// + /// Relative weight of the vector query when compared to other vector query and/or + /// the text query within the same search request. This value is used when + /// combining the results of multiple ranking lists produced by the different + /// vector queries and/or the results retrieved through the text query. The higher + /// the weight, the higher the documents that matched that query will be in the + /// final ranking. Default is 1.0 and the value needs to be a positive number + /// larger than zero. + /// + public float? Weight { get; set; } + /// + /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public VectorThreshold Threshold { get; set; } + /// + /// The OData filter expression to apply to this specific vector query. If no + /// filter expression is defined at the vector level, the expression defined in the + /// top level filter parameter is used instead. + /// + public string FilterOverride { get; set; } + /// Type of query. + internal VectorQueryKind Kind { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQueryKind.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorQueryKind.cs similarity index 88% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQueryKind.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorQueryKind.cs index af7a06c12c8d..eb866527cb46 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorQueryKind.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorQueryKind.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// The kind of vector query being performed. 
internal readonly partial struct VectorQueryKind : IEquatable @@ -31,9 +31,15 @@ public VectorQueryKind(string value) public static VectorQueryKind Vector { get; } = new VectorQueryKind(VectorValue); /// Vector query where a text value that needs to be vectorized is provided. public static VectorQueryKind Text { get; } = new VectorQueryKind(TextValue); - /// Vector query where an url that represents an image value that needs to be vectorized is provided. + /// + /// Vector query where an url that represents an image value that needs to be + /// vectorized is provided. + /// public static VectorQueryKind ImageUrl { get; } = new VectorQueryKind(ImageUrlValue); - /// Vector query where a base 64 encoded binary of an image that needs to be vectorized is provided. + /// + /// Vector query where a base 64 encoded binary of an image that needs to be + /// vectorized is provided. + /// public static VectorQueryKind ImageBinary { get; } = new VectorQueryKind(ImageBinaryValue); /// Determines if two values are the same. public static bool operator ==(VectorQueryKind left, VectorQueryKind right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearch.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearch.Serialization.cs new file mode 100644 index 000000000000..c8ad5a7393f4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearch.Serialization.cs @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class VectorSearch : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearch)} does not support writing '{format}' format."); + } + + if (Optional.IsCollectionDefined(Profiles)) + { + writer.WritePropertyName("profiles"u8); + writer.WriteStartArray(); + foreach (var item in Profiles) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Algorithms)) + { + writer.WritePropertyName("algorithms"u8); + writer.WriteStartArray(); + foreach (var item in Algorithms) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Vectorizers)) + { + writer.WritePropertyName("vectorizers"u8); + writer.WriteStartArray(); + foreach (var item in Vectorizers) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Compressions)) + { + writer.WritePropertyName("compressions"u8); + writer.WriteStartArray(); + foreach (var item in Compressions) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorSearch IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearch)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearch(document.RootElement, options); + } + + internal static VectorSearch DeserializeVectorSearch(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList profiles = default; + IList algorithms = default; + IList vectorizers = default; + IList compressions = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("profiles"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(VectorSearchProfile.DeserializeVectorSearchProfile(item, options)); + } + profiles = array; + continue; + } + if (property.NameEquals("algorithms"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(VectorSearchAlgorithmConfiguration.DeserializeVectorSearchAlgorithmConfiguration(item, options)); + } + algorithms = array; + continue; + } + if (property.NameEquals("vectorizers"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + 
array.Add(VectorSearchVectorizer.DeserializeVectorSearchVectorizer(item, options)); + } + vectorizers = array; + continue; + } + if (property.NameEquals("compressions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(VectorSearchCompression.DeserializeVectorSearchCompression(item, options)); + } + compressions = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorSearch(profiles ?? new ChangeTrackingList(), algorithms ?? new ChangeTrackingList(), vectorizers ?? new ChangeTrackingList(), compressions ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearch)} does not support writing '{options.Format}' format."); + } + } + + VectorSearch IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearch(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearch)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VectorSearch FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearch(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearch.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearch.cs new file mode 100644 index 000000000000..660904c99156 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearch.cs @@ -0,0 +1,109 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains configuration options related to vector search. + public partial class VectorSearch + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public VectorSearch() + { + Profiles = new ChangeTrackingList(); + Algorithms = new ChangeTrackingList(); + Vectorizers = new ChangeTrackingList(); + Compressions = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Defines combinations of configurations to use with vector search. + /// + /// Contains configuration options specific to the algorithm used during indexing + /// or querying. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// Contains configuration options on how to vectorize text vector queries. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + /// + /// Contains configuration options specific to the compression method used during + /// indexing or querying. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// Keeps track of any properties unknown to the library. + internal VectorSearch(IList profiles, IList algorithms, IList vectorizers, IList compressions, IDictionary serializedAdditionalRawData) + { + Profiles = profiles; + Algorithms = algorithms; + Vectorizers = vectorizers; + Compressions = compressions; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Defines combinations of configurations to use with vector search. + public IList Profiles { get; } + /// + /// Contains configuration options specific to the algorithm used during indexing + /// or querying. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public IList Algorithms { get; } + /// + /// Contains configuration options on how to vectorize text vector queries. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public IList Vectorizers { get; } + /// + /// Contains configuration options specific to the compression method used during + /// indexing or querying. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . 
+ /// + public IList Compressions { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmConfiguration.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmConfiguration.Serialization.cs new file mode 100644 index 000000000000..f03527388734 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmConfiguration.Serialization.cs @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownVectorSearchAlgorithmConfiguration))] + public partial class VectorSearchAlgorithmConfiguration : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorSearchAlgorithmConfiguration IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearchAlgorithmConfiguration(document.RootElement, options); + } + + internal static VectorSearchAlgorithmConfiguration DeserializeVectorSearchAlgorithmConfiguration(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "exhaustiveKnn": return ExhaustiveKnnAlgorithmConfiguration.DeserializeExhaustiveKnnAlgorithmConfiguration(element, options); + case "hnsw": return HnswAlgorithmConfiguration.DeserializeHnswAlgorithmConfiguration(element, options); + } + } + return UnknownVectorSearchAlgorithmConfiguration.DeserializeUnknownVectorSearchAlgorithmConfiguration(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support writing '{options.Format}' format."); + } + } + + VectorSearchAlgorithmConfiguration IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchAlgorithmConfiguration(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearchAlgorithmConfiguration)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VectorSearchAlgorithmConfiguration FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchAlgorithmConfiguration(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmConfiguration.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmConfiguration.cs new file mode 100644 index 000000000000..0a4cea58425c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmConfiguration.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
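DeserializeVectorSearchAlgorithmConfiguration above routes on the "kind" discriminator, so reading the abstract base type yields the matching derived configuration, with unknown kinds falling back to the internal proxy registered via PersistableModelProxy. A minimal usage sketch, assuming HnswAlgorithmConfiguration is public in this namespace and that the default ModelReaderWriter options resolve to the "J" format the serializer expects:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

internal static class AlgorithmDiscriminatorSample
{
    internal static void Run()
    {
        // "kind": "hnsw" is matched by the switch in DeserializeVectorSearchAlgorithmConfiguration.
        BinaryData json = BinaryData.FromString("{\"name\":\"my-hnsw\",\"kind\":\"hnsw\"}");

        VectorSearchAlgorithmConfiguration config =
            ModelReaderWriter.Read<VectorSearchAlgorithmConfiguration>(json);

        Console.WriteLine(config is HnswAlgorithmConfiguration); // True
        Console.WriteLine(config.Name);                          // my-hnsw
    }
}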
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains configuration options specific to the algorithm used during indexing + /// or querying. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public abstract partial class VectorSearchAlgorithmConfiguration + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// is null. + protected VectorSearchAlgorithmConfiguration(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// Type of VectorSearchAlgorithmConfiguration. + /// Keeps track of any properties unknown to the library. + internal VectorSearchAlgorithmConfiguration(string name, VectorSearchAlgorithmKind kind, IDictionary serializedAdditionalRawData) + { + Name = name; + Kind = kind; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VectorSearchAlgorithmConfiguration() + { + } + + /// The name to associate with this particular configuration. + public string Name { get; set; } + /// Type of VectorSearchAlgorithmConfiguration. + internal VectorSearchAlgorithmKind Kind { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmKind.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmKind.cs similarity index 89% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmKind.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmKind.cs index f0fa35bad236..de2cd0d57b91 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchAlgorithmKind.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmKind.cs @@ -8,10 +8,10 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The algorithm used for indexing and querying. - internal readonly partial struct VectorSearchAlgorithmKind : IEquatable + public readonly partial struct VectorSearchAlgorithmKind : IEquatable { private readonly string _value; @@ -25,7 +25,10 @@ public VectorSearchAlgorithmKind(string value) private const string HnswValue = "hnsw"; private const string ExhaustiveKnnValue = "exhaustiveKnn"; - /// HNSW (Hierarchical Navigable Small World), a type of approximate nearest neighbors algorithm. 
+ /// + /// HNSW (Hierarchical Navigable Small World), a type of approximate nearest + /// neighbors algorithm. + /// public static VectorSearchAlgorithmKind Hnsw { get; } = new VectorSearchAlgorithmKind(HnswValue); /// Exhaustive KNN algorithm which will perform brute-force search. public static VectorSearchAlgorithmKind ExhaustiveKnn { get; } = new VectorSearchAlgorithmKind(ExhaustiveKnnValue); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmMetric.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmMetric.cs new file mode 100644 index 000000000000..93b0870f6c91 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchAlgorithmMetric.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Search.Documents +{ + /// + /// The similarity metric to use for vector comparisons. It is recommended to + /// choose the same similarity metric as the embedding model was trained on. + /// + public readonly partial struct VectorSearchAlgorithmMetric : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public VectorSearchAlgorithmMetric(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string CosineValue = "cosine"; + private const string EuclideanValue = "euclidean"; + private const string DotProductValue = "dotProduct"; + private const string HammingValue = "hamming"; + + /// + /// Measures the angle between vectors to quantify their similarity, disregarding + /// magnitude. The smaller the angle, the closer the similarity. + /// + public static VectorSearchAlgorithmMetric Cosine { get; } = new VectorSearchAlgorithmMetric(CosineValue); + /// + /// Computes the straight-line distance between vectors in a multi-dimensional + /// space. The smaller the distance, the closer the similarity. + /// + public static VectorSearchAlgorithmMetric Euclidean { get; } = new VectorSearchAlgorithmMetric(EuclideanValue); + /// + /// Calculates the sum of element-wise products to gauge alignment and magnitude + /// similarity. The larger and more positive, the closer the similarity. + /// + public static VectorSearchAlgorithmMetric DotProduct { get; } = new VectorSearchAlgorithmMetric(DotProductValue); + /// + /// Only applicable to bit-packed binary data types. Determines dissimilarity by + /// counting differing positions in binary vectors. The fewer differences, the + /// closer the similarity. + /// + public static VectorSearchAlgorithmMetric Hamming { get; } = new VectorSearchAlgorithmMetric(HammingValue); + /// Determines if two values are the same. + public static bool operator ==(VectorSearchAlgorithmMetric left, VectorSearchAlgorithmMetric right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(VectorSearchAlgorithmMetric left, VectorSearchAlgorithmMetric right) => !left.Equals(right); + /// Converts a to a . 
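VectorSearchAlgorithmMetric above follows the SDK's extensible-enum pattern: any string is accepted, known values compare case-insensitively, and ToString returns the wire value, so a newer service metric does not break older clients. A short sketch using only the members shown above:

using System;
using Azure.Search.Documents;

internal static class MetricSample
{
    internal static void Run()
    {
        VectorSearchAlgorithmMetric metric = "COSINE"; // implicit conversion from string

        // Equality is case-insensitive, so this matches the predefined Cosine value.
        Console.WriteLine(metric == VectorSearchAlgorithmMetric.Cosine); // True

        // ToString returns the underlying wire value.
        Console.WriteLine(VectorSearchAlgorithmMetric.DotProduct.ToString()); // dotProduct

        // Unknown values are carried as-is rather than throwing.
        var future = new VectorSearchAlgorithmMetric("someFutureMetric");
        Console.WriteLine(future.ToString()); // someFutureMetric
    }
}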
+ public static implicit operator VectorSearchAlgorithmMetric(string value) => new VectorSearchAlgorithmMetric(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is VectorSearchAlgorithmMetric other && Equals(other); + /// + public bool Equals(VectorSearchAlgorithmMetric other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompression.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompression.Serialization.cs new file mode 100644 index 000000000000..54b6808b683f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompression.Serialization.cs @@ -0,0 +1,156 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownVectorSearchCompression))] + public partial class VectorSearchCompression : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(CompressionName); + if (Optional.IsDefined(RerankWithOriginalVectors)) + { + writer.WritePropertyName("rerankWithOriginalVectors"u8); + writer.WriteBooleanValue(RerankWithOriginalVectors.Value); + } + if (Optional.IsDefined(DefaultOversampling)) + { + writer.WritePropertyName("defaultOversampling"u8); + writer.WriteNumberValue(DefaultOversampling.Value); + } + if (Optional.IsDefined(RescoringOptions)) + { + writer.WritePropertyName("rescoringOptions"u8); + writer.WriteObjectValue(RescoringOptions, options); + } + if (Optional.IsDefined(TruncationDimension)) + { + writer.WritePropertyName("truncationDimension"u8); + writer.WriteNumberValue(TruncationDimension.Value); + } + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorSearchCompression IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearchCompression(document.RootElement, options); + } + + internal static VectorSearchCompression DeserializeVectorSearchCompression(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "binaryQuantization": return BinaryQuantizationCompression.DeserializeBinaryQuantizationCompression(element, options); + case "scalarQuantization": return ScalarQuantizationCompression.DeserializeScalarQuantizationCompression(element, options); + } + } + return UnknownVectorSearchCompression.DeserializeUnknownVectorSearchCompression(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support writing '{options.Format}' format."); + } + } + + VectorSearchCompression IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchCompression(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearchCompression)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VectorSearchCompression FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchCompression(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompression.cs new file mode 100644 index 000000000000..02d58ff7bd6d --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompression.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Contains configuration options specific to the compression method used during + /// indexing or querying. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public abstract partial class VectorSearchCompression + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// is null. + protected VectorSearchCompression(string compressionName) + { + Argument.AssertNotNull(compressionName, nameof(compressionName)); + + CompressionName = compressionName; + } + + /// Initializes a new instance of . + /// The name to associate with this particular configuration. + /// + /// If set to true, once the ordered set of results calculated using compressed + /// vectors are obtained, they will be reranked again by recalculating the + /// full-precision similarity scores. This will improve recall at the expense of + /// latency. 
+ /// + /// + /// Default oversampling factor. Oversampling will internally request more + /// documents (specified by this multiplier) in the initial search. This increases + /// the set of results that will be reranked using recomputed similarity scores + /// from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). + /// This parameter can only be set when rerankWithOriginalVectors is true. Higher + /// values improve recall at the expense of latency. + /// + /// Contains the options for rescoring. + /// + /// The number of dimensions to truncate the vectors to. Truncating the vectors + /// reduces the size of the vectors and the amount of data that needs to be + /// transferred during search. This can save storage cost and improve search + /// performance at the expense of recall. It should be only used for embeddings + /// trained with Matryoshka Representation Learning (MRL) such as OpenAI + /// text-embedding-3-large (small). The default value is null, which means no + /// truncation. + /// + /// Type of VectorSearchCompression. + /// Keeps track of any properties unknown to the library. + internal VectorSearchCompression(string compressionName, bool? rerankWithOriginalVectors, double? defaultOversampling, RescoringOptions rescoringOptions, int? truncationDimension, VectorSearchCompressionKind kind, IDictionary serializedAdditionalRawData) + { + CompressionName = compressionName; + RerankWithOriginalVectors = rerankWithOriginalVectors; + DefaultOversampling = defaultOversampling; + RescoringOptions = rescoringOptions; + TruncationDimension = truncationDimension; + Kind = kind; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VectorSearchCompression() + { + } + + /// The name to associate with this particular configuration. + public string CompressionName { get; set; } + /// + /// If set to true, once the ordered set of results calculated using compressed + /// vectors are obtained, they will be reranked again by recalculating the + /// full-precision similarity scores. This will improve recall at the expense of + /// latency. + /// + public bool? RerankWithOriginalVectors { get; set; } + /// + /// Default oversampling factor. Oversampling will internally request more + /// documents (specified by this multiplier) in the initial search. This increases + /// the set of results that will be reranked using recomputed similarity scores + /// from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). + /// This parameter can only be set when rerankWithOriginalVectors is true. Higher + /// values improve recall at the expense of latency. + /// + public double? DefaultOversampling { get; set; } + /// Contains the options for rescoring. + public RescoringOptions RescoringOptions { get; set; } + /// + /// The number of dimensions to truncate the vectors to. Truncating the vectors + /// reduces the size of the vectors and the amount of data that needs to be + /// transferred during search. This can save storage cost and improve search + /// performance at the expense of recall. It should be only used for embeddings + /// trained with Matryoshka Representation Learning (MRL) such as OpenAI + /// text-embedding-3-large (small). The default value is null, which means no + /// truncation. + /// + public int? TruncationDimension { get; set; } + /// Type of VectorSearchCompression. 
+ internal VectorSearchCompressionKind Kind { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionKind.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionKind.cs similarity index 77% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionKind.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionKind.cs index 25c669976b64..e9fec49b1fef 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionKind.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionKind.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The compression method used for indexing and querying. internal readonly partial struct VectorSearchCompressionKind : IEquatable @@ -25,9 +25,19 @@ public VectorSearchCompressionKind(string value) private const string ScalarQuantizationValue = "scalarQuantization"; private const string BinaryQuantizationValue = "binaryQuantization"; - /// Scalar Quantization, a type of compression method. In scalar quantization, the original vectors values are compressed to a narrower type by discretizing and representing each component of a vector using a reduced set of quantized values, thereby reducing the overall data size. + /// + /// Scalar Quantization, a type of compression method. In scalar quantization, the + /// original vectors values are compressed to a narrower type by discretizing and + /// representing each component of a vector using a reduced set of quantized + /// values, thereby reducing the overall data size. + /// public static VectorSearchCompressionKind ScalarQuantization { get; } = new VectorSearchCompressionKind(ScalarQuantizationValue); - /// Binary Quantization, a type of compression method. In binary quantization, the original vectors values are compressed to the narrower binary type by discretizing and representing each component of a vector using binary values, thereby reducing the overall data size. + /// + /// Binary Quantization, a type of compression method. In binary quantization, the + /// original vectors values are compressed to the narrower binary type by discretizing + /// and representing each component of a vector using binary values, + /// thereby reducing the overall data size. + /// public static VectorSearchCompressionKind BinaryQuantization { get; } = new VectorSearchCompressionKind(BinaryQuantizationValue); /// Determines if two values are the same. 
public static bool operator ==(VectorSearchCompressionKind left, VectorSearchCompressionKind right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionRescoreStorageMethod.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionRescoreStorageMethod.cs similarity index 77% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionRescoreStorageMethod.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionRescoreStorageMethod.cs index 6a745f14d47c..52f4492ff01e 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionRescoreStorageMethod.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionRescoreStorageMethod.cs @@ -8,9 +8,12 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { - /// The storage method for the original full-precision vectors used for rescoring and internal index operations. + /// + /// The storage method for the original full-precision vectors used for rescoring + /// and internal index operations. + /// public readonly partial struct VectorSearchCompressionRescoreStorageMethod : IEquatable { private readonly string _value; @@ -25,9 +28,17 @@ public VectorSearchCompressionRescoreStorageMethod(string value) private const string PreserveOriginalsValue = "preserveOriginals"; private const string DiscardOriginalsValue = "discardOriginals"; - /// This option preserves the original full-precision vectors. Choose this option for maximum flexibility and highest quality of compressed search results. This consumes more storage but allows for rescoring and oversampling. + /// + /// This option preserves the original full-precision vectors. Choose this option + /// for maximum flexibility and highest quality of compressed search results. This + /// consumes more storage but allows for rescoring and oversampling. + /// public static VectorSearchCompressionRescoreStorageMethod PreserveOriginals { get; } = new VectorSearchCompressionRescoreStorageMethod(PreserveOriginalsValue); - /// This option discards the original full-precision vectors. Choose this option for maximum storage savings. Since this option does not allow for rescoring and oversampling, it will often cause slight to moderate reductions in quality. + /// + /// This option discards the original full-precision vectors. Choose this option + /// for maximum storage savings. Since this option does not allow for rescoring and + /// oversampling, it will often cause slight to moderate reductions in quality. + /// public static VectorSearchCompressionRescoreStorageMethod DiscardOriginals { get; } = new VectorSearchCompressionRescoreStorageMethod(DiscardOriginalsValue); /// Determines if two values are the same. 
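The compression knobs above interact: DefaultOversampling is only honored when RerankWithOriginalVectors is true, and TruncationDimension only applies to MRL-trained embeddings. A hedged sketch, assuming ScalarQuantizationCompression is the public derived type behind the "scalarQuantization" kind and, like the base class, takes only the configuration name in its constructor:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

internal static class CompressionSample
{
    internal static void Run()
    {
        var compression = new ScalarQuantizationCompression("my-sq")
        {
            RerankWithOriginalVectors = true, // required for DefaultOversampling to take effect
            DefaultOversampling = 4,
            TruncationDimension = 512,        // only for MRL-trained embeddings; null means no truncation
        };

        // Serializes with the JSON names written by JsonModelWriteCore above, roughly:
        // { "name": "my-sq", "rerankWithOriginalVectors": true, "defaultOversampling": 4,
        //   "truncationDimension": 512, "kind": "scalarQuantization", ... }
        BinaryData json = ModelReaderWriter.Write(compression);
        Console.WriteLine(json.ToString());
    }
}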
public static bool operator ==(VectorSearchCompressionRescoreStorageMethod left, VectorSearchCompressionRescoreStorageMethod right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionTarget.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionTarget.cs similarity index 96% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionTarget.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionTarget.cs index 030e776fc59c..c9b3cdfcd984 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompressionTarget.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchCompressionTarget.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The quantized data type of compressed vector values. public readonly partial struct VectorSearchCompressionTarget : IEquatable @@ -24,7 +24,7 @@ public VectorSearchCompressionTarget(string value) private const string Int8Value = "int8"; - /// int8. + /// 8-bit signed integer. public static VectorSearchCompressionTarget Int8 { get; } = new VectorSearchCompressionTarget(Int8Value); /// Determines if two values are the same. public static bool operator ==(VectorSearchCompressionTarget left, VectorSearchCompressionTarget right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchProfile.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchProfile.Serialization.cs new file mode 100644 index 000000000000..846bc6135457 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchProfile.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class VectorSearchProfile : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchProfile)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + writer.WritePropertyName("algorithm"u8); + writer.WriteStringValue(AlgorithmConfigurationName); + if (Optional.IsDefined(VectorizerName)) + { + writer.WritePropertyName("vectorizer"u8); + writer.WriteStringValue(VectorizerName); + } + if (Optional.IsDefined(CompressionName)) + { + writer.WritePropertyName("compression"u8); + writer.WriteStringValue(CompressionName); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorSearchProfile IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchProfile)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearchProfile(document.RootElement, options); + } + + internal static VectorSearchProfile DeserializeVectorSearchProfile(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string algorithm = default; + string vectorizer = default; + string compression = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("algorithm"u8)) + { + algorithm = property.Value.GetString(); + continue; + } + if (property.NameEquals("vectorizer"u8)) + { + vectorizer = property.Value.GetString(); + continue; + } + if (property.NameEquals("compression"u8)) + { + compression = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorSearchProfile(name, algorithm, vectorizer, compression, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearchProfile)} does not support writing '{options.Format}' format."); + } + } + + VectorSearchProfile IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchProfile(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearchProfile)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VectorSearchProfile FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchProfile(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchProfile.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchProfile.cs new file mode 100644 index 000000000000..507e71d2499c --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchProfile.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Defines a combination of configurations to use with vector search. + public partial class VectorSearchProfile + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name to associate with this particular vector search profile. + /// + /// The name of the vector search algorithm configuration that specifies the + /// algorithm and optional parameters. + /// + /// or is null. + public VectorSearchProfile(string name, string algorithmConfigurationName) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(algorithmConfigurationName, nameof(algorithmConfigurationName)); + + Name = name; + AlgorithmConfigurationName = algorithmConfigurationName; + } + + /// Initializes a new instance of . + /// The name to associate with this particular vector search profile. + /// + /// The name of the vector search algorithm configuration that specifies the + /// algorithm and optional parameters. + /// + /// The name of the vectorization being configured for use with vector search. + /// + /// The name of the compression method configuration that specifies the compression + /// method and optional parameters. 
+ /// + /// Keeps track of any properties unknown to the library. + internal VectorSearchProfile(string name, string algorithmConfigurationName, string vectorizerName, string compressionName, IDictionary serializedAdditionalRawData) + { + Name = name; + AlgorithmConfigurationName = algorithmConfigurationName; + VectorizerName = vectorizerName; + CompressionName = compressionName; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VectorSearchProfile() + { + } + + /// The name to associate with this particular vector search profile. + public string Name { get; set; } + /// + /// The name of the vector search algorithm configuration that specifies the + /// algorithm and optional parameters. + /// + public string AlgorithmConfigurationName { get; set; } + /// The name of the vectorization being configured for use with vector search. + public string VectorizerName { get; set; } + /// + /// The name of the compression method configuration that specifies the compression + /// method and optional parameters. + /// + public string CompressionName { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizer.Serialization.cs new file mode 100644 index 000000000000..5b9742f2f645 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizer.Serialization.cs @@ -0,0 +1,138 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownVectorSearchVectorizer))] + public partial class VectorSearchVectorizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("name"u8); + writer.WriteStringValue(VectorizerName); + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorSearchVectorizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSearchVectorizer(document.RootElement, options); + } + + internal static VectorSearchVectorizer DeserializeVectorSearchVectorizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "aiServicesVision": return AIServicesVisionVectorizer.DeserializeAIServicesVisionVectorizer(element, options); + case "aml": return AzureMachineLearningVectorizer.DeserializeAzureMachineLearningVectorizer(element, options); + case "azureOpenAI": return AzureOpenAIVectorizer.DeserializeAzureOpenAIVectorizer(element, options); + case "customWebApi": return WebApiVectorizer.DeserializeWebApiVectorizer(element, options); + } + } + return UnknownVectorSearchVectorizer.DeserializeUnknownVectorSearchVectorizer(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support writing '{options.Format}' format."); + } + } + + VectorSearchVectorizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchVectorizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSearchVectorizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VectorSearchVectorizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSearchVectorizer(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizer.cs new file mode 100644 index 000000000000..c7a0800334ef --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizer.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
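Because unrecognized "kind" values fall back to an internal placeholder type and unmodeled properties are captured in _serializedAdditionalRawData whenever the format is not "W", payloads from a newer service API version round-trip without loss. A sketch assuming the default ModelReaderWriter options resolve to the "J" format:

using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

internal static class VectorizerFallbackSample
{
    internal static void Run()
    {
        BinaryData json = BinaryData.FromString(
            "{\"name\":\"my-vectorizer\",\"kind\":\"someNewKind\",\"extra\":123}");

        // No exception: the discriminator switch falls through to the internal Unknown* placeholder.
        VectorSearchVectorizer vectorizer = ModelReaderWriter.Read<VectorSearchVectorizer>(json);

        Console.WriteLine(vectorizer.VectorizerName); // my-vectorizer

        // "someNewKind" and "extra" survive a write because they were stashed in the
        // additional-raw-data dictionary during deserialization.
        Console.WriteLine(ModelReaderWriter.Write(vectorizer).ToString());
    }
}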
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Specifies the vectorization method to be used during query time. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + public abstract partial class VectorSearchVectorizer + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// is null. + protected VectorSearchVectorizer(string vectorizerName) + { + Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); + + VectorizerName = vectorizerName; + } + + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// Type of VectorSearchVectorizer. + /// Keeps track of any properties unknown to the library. + internal VectorSearchVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, IDictionary serializedAdditionalRawData) + { + VectorizerName = vectorizerName; + Kind = kind; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VectorSearchVectorizer() + { + } + + /// The name to associate with this particular vectorization method. + public string VectorizerName { get; set; } + /// Type of VectorSearchVectorizer. + internal VectorSearchVectorizerKind Kind { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizerKind.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizerKind.cs similarity index 85% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizerKind.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizerKind.cs index bef6730ac985..22f90ff3e68a 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchVectorizerKind.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSearchVectorizerKind.cs @@ -8,10 +8,10 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The vectorization method to be used during query time. 
- internal readonly partial struct VectorSearchVectorizerKind : IEquatable + public readonly partial struct VectorSearchVectorizerKind : IEquatable { private readonly string _value; @@ -31,9 +31,15 @@ public VectorSearchVectorizerKind(string value) public static VectorSearchVectorizerKind AzureOpenAI { get; } = new VectorSearchVectorizerKind(AzureOpenAIValue); /// Generate embeddings using a custom web endpoint at query time. public static VectorSearchVectorizerKind CustomWebApi { get; } = new VectorSearchVectorizerKind(CustomWebApiValue); - /// Generate embeddings for an image or text input at query time using the Azure AI Services Vision Vectorize API. + /// + /// Generate embeddings for an image or text input at query time using the Azure AI + /// Services Vision Vectorize API. + /// public static VectorSearchVectorizerKind AIServicesVision { get; } = new VectorSearchVectorizerKind(AIServicesVisionValue); - /// Generate embeddings using an Azure Machine Learning endpoint deployed via the Azure AI Studio Model Catalog at query time. + /// + /// Generate embeddings using an Azure Machine Learning endpoint deployed via the + /// Azure AI Foundry Model Catalog at query time. + /// public static VectorSearchVectorizerKind AML { get; } = new VectorSearchVectorizerKind(AMLValue); /// Determines if two values are the same. public static bool operator ==(VectorSearchVectorizerKind left, VectorSearchVectorizerKind right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSimilarityThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSimilarityThreshold.Serialization.cs new file mode 100644 index 000000000000..2e9b886ff13f --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSimilarityThreshold.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class VectorSimilarityThreshold : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSimilarityThreshold)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("value"u8); + writer.WriteNumberValue(Value); + } + + VectorSimilarityThreshold IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorSimilarityThreshold)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorSimilarityThreshold(document.RootElement, options); + } + + internal static VectorSimilarityThreshold DeserializeVectorSimilarityThreshold(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + double value = default; + VectorThresholdKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value"u8)) + { + value = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorThresholdKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorSimilarityThreshold(kind, serializedAdditionalRawData, value); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorSimilarityThreshold)} does not support writing '{options.Format}' format."); + } + } + + VectorSimilarityThreshold IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSimilarityThreshold(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorSimilarityThreshold)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new VectorSimilarityThreshold FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorSimilarityThreshold(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorSimilarityThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorSimilarityThreshold.cs new file mode 100644 index 000000000000..eb627034d494 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorSimilarityThreshold.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The results of the vector query will be filtered based on the vector similarity + /// metric. Note this is the canonical definition of similarity metric, not the 'distance' + /// version. The threshold direction (larger or smaller) will be chosen + /// automatically according to the metric used by the field. + /// + internal partial class VectorSimilarityThreshold : VectorThreshold + { + /// Initializes a new instance of . + /// + /// The threshold will filter based on the similarity metric value. Note this is + /// the canonical definition of similarity metric, not the 'distance' version. The + /// threshold direction (larger or smaller) will be chosen automatically according + /// to the metric used by the field. + /// + public VectorSimilarityThreshold(double value) + { + Kind = VectorThresholdKind.VectorSimilarity; + Value = value; + } + + /// Initializes a new instance of . + /// Type of threshold. + /// Keeps track of any properties unknown to the library. + /// + /// The threshold will filter based on the similarity metric value. Note this is + /// the canonical definition of similarity metric, not the 'distance' version. The + /// threshold direction (larger or smaller) will be chosen automatically according + /// to the metric used by the field. + /// + internal VectorSimilarityThreshold(VectorThresholdKind kind, IDictionary serializedAdditionalRawData, double value) : base(kind, serializedAdditionalRawData) + { + Value = value; + } + + /// Initializes a new instance of for deserialization. + internal VectorSimilarityThreshold() + { + } + + /// + /// The threshold will filter based on the similarity metric value. Note this is + /// the canonical definition of similarity metric, not the 'distance' version. The + /// threshold direction (larger or smaller) will be chosen automatically according + /// to the metric used by the field. + /// + public double Value { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorThreshold.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorThreshold.Serialization.cs new file mode 100644 index 000000000000..75a791afded4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorThreshold.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + [PersistableModelProxy(typeof(UnknownVectorThreshold))] + internal partial class VectorThreshold : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorThreshold)} does not support writing '{format}' format."); + } + + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorThreshold IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorThreshold)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorThreshold(document.RootElement, options); + } + + internal static VectorThreshold DeserializeVectorThreshold(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind", out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "searchScore": return SearchScoreThreshold.DeserializeSearchScoreThreshold(element, options); + case "vectorSimilarity": return VectorSimilarityThreshold.DeserializeVectorSimilarityThreshold(element, options); + } + } + return UnknownVectorThreshold.DeserializeUnknownVectorThreshold(element, options); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorThreshold)} does not support writing '{options.Format}' format."); + } + } + + VectorThreshold IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorThreshold(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorThreshold)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VectorThreshold FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorThreshold(document.RootElement); + } + + /// Convert into a . 
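// Illustrative sketch, not part of the generated code: DeserializeVectorThreshold above
// dispatches on the "kind" discriminator, returning the matching derived type and
// falling back to UnknownVectorThreshold for kinds this library version does not know.
// The payload values are arbitrary examples, and the calls assume internal access.
using System.Text.Json;

using JsonDocument similarityJson = JsonDocument.Parse("{\"kind\":\"vectorSimilarity\",\"value\":0.8}");
using JsonDocument scoreJson = JsonDocument.Parse("{\"kind\":\"searchScore\",\"value\":1.5}");
VectorThreshold bySimilarity = VectorThreshold.DeserializeVectorThreshold(similarityJson.RootElement); // VectorSimilarityThreshold
VectorThreshold byScore = VectorThreshold.DeserializeVectorThreshold(scoreJson.RootElement);           // SearchScoreThreshold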
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorThreshold.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorThreshold.cs new file mode 100644 index 000000000000..09bf76867e17 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorThreshold.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The threshold used for vector queries. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + internal abstract partial class VectorThreshold + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private protected IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + protected VectorThreshold() + { + } + + /// Initializes a new instance of . + /// Type of threshold. + /// Keeps track of any properties unknown to the library. + internal VectorThreshold(VectorThresholdKind kind, IDictionary serializedAdditionalRawData) + { + Kind = kind; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Type of threshold. + internal VectorThresholdKind Kind { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThresholdKind.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorThresholdKind.cs similarity index 77% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThresholdKind.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VectorThresholdKind.cs index 0c16796e69ca..413b9b2f1b14 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorThresholdKind.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorThresholdKind.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Models +namespace Azure.Search.Documents { /// The kind of vector query being performed. internal readonly partial struct VectorThresholdKind : IEquatable @@ -25,9 +25,18 @@ public VectorThresholdKind(string value) private const string VectorSimilarityValue = "vectorSimilarity"; private const string SearchScoreValue = "searchScore"; - /// The results of the vector query will be filtered based on the vector similarity metric. Note this is the canonical definition of similarity metric, not the 'distance' version. 
The threshold direction (larger or smaller) will be chosen automatically according to the metric used by the field. + /// + /// The results of the vector query will be filtered based on the vector similarity + /// metric. Note this is the canonical definition of similarity metric, not the 'distance' + /// version. The threshold direction (larger or smaller) will be chosen + /// automatically according to the metric used by the field. + /// public static VectorThresholdKind VectorSimilarity { get; } = new VectorThresholdKind(VectorSimilarityValue); - /// The results of the vector query will filter based on the '@search.score' value. Note this is the @search.score returned as part of the search response. The threshold direction will be chosen for higher @search.score. + /// + /// The results of the vector query will filter based on the '@search.score' value. + /// Note this is the @search.score returned as part of the search response. The + /// threshold direction will be chosen for higher @search.score. + /// public static VectorThresholdKind SearchScore { get; } = new VectorThresholdKind(SearchScoreValue); /// Determines if two values are the same. public static bool operator ==(VectorThresholdKind left, VectorThresholdKind right) => left.Equals(right); diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageBinaryQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageBinaryQuery.Serialization.cs new file mode 100644 index 000000000000..d50e11689786 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageBinaryQuery.Serialization.cs @@ -0,0 +1,209 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class VectorizableImageBinaryQuery : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizableImageBinaryQuery)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Base64Image)) + { + writer.WritePropertyName("base64Image"u8); + writer.WriteStringValue(Base64Image); + } + } + + VectorizableImageBinaryQuery IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizableImageBinaryQuery)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorizableImageBinaryQuery(document.RootElement, options); + } + + internal static VectorizableImageBinaryQuery DeserializeVectorizableImageBinaryQuery(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string base64Image = default; + int? k = default; + string fields = default; + bool? exhaustive = default; + double? oversampling = default; + float? weight = default; + VectorThreshold threshold = default; + string filterOverride = default; + VectorQueryKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("base64Image"u8)) + { + base64Image = property.Value.GetString(); + continue; + } + if (property.NameEquals("k"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + k = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("fields"u8)) + { + fields = property.Value.GetString(); + continue; + } + if (property.NameEquals("exhaustive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exhaustive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("oversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + oversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("weight"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + weight = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("threshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + threshold = VectorThreshold.DeserializeVectorThreshold(property.Value, options); + continue; + } + if (property.NameEquals("filterOverride"u8)) + { + filterOverride = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorQueryKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorizableImageBinaryQuery( + k, + fields, + exhaustive, + oversampling, + weight, + threshold, + filterOverride, + kind, + serializedAdditionalRawData, + base64Image); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorizableImageBinaryQuery)} does not support writing '{options.Format}' format."); + } + } + + VectorizableImageBinaryQuery IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizableImageBinaryQuery(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorizableImageBinaryQuery)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new VectorizableImageBinaryQuery FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizableImageBinaryQuery(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageBinaryQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageBinaryQuery.cs new file mode 100644 index 000000000000..ce638ba4b54a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageBinaryQuery.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The query parameters to use for vector search when a base 64 encoded binary of + /// an image that needs to be vectorized is provided. + /// + internal partial class VectorizableImageBinaryQuery : VectorQuery + { + /// Initializes a new instance of . + public VectorizableImageBinaryQuery() + { + Kind = VectorQueryKind.ImageBinary; + } + + /// Initializes a new instance of . + /// Number of nearest neighbors to return as top hits. + /// + /// Vector Fields of type Collection(Edm.Single) to be included in the vector + /// searched. + /// + /// + /// When true, triggers an exhaustive k-nearest neighbor search across all vectors + /// within the vector index. Useful for scenarios where exact matches are critical, + /// such as determining ground truth values. + /// + /// + /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' + /// parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' + /// is true. This parameter is only permitted when a + /// compression method is used on the underlying vector field. + /// + /// + /// Relative weight of the vector query when compared to other vector query and/or + /// the text query within the same search request. This value is used when + /// combining the results of multiple ranking lists produced by the different + /// vector queries and/or the results retrieved through the text query. The higher + /// the weight, the higher the documents that matched that query will be in the + /// final ranking. Default is 1.0 and the value needs to be a positive number + /// larger than zero. + /// + /// + /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. + /// Please note is the base class. 
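// Illustrative sketch, not part of the generated code: a minimal way to build the
// image-binary vector query described above. The base64 payload is a placeholder, the
// call assumes internal access, and only Base64Image is set here; the shared VectorQuery
// settings (k, fields, exhaustive, oversampling, weight, threshold, filterOverride) ride
// along from the base type on the wire, as the deserializer above shows.
var imageQuery = new VectorizableImageBinaryQuery   // parameterless constructor sets Kind to VectorQueryKind.ImageBinary
{
    Base64Image = "<base64-encoded image bytes>"    // vectorized by the service at query time
};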
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The OData filter expression to apply to this specific vector query. If no + /// filter expression is defined at the vector level, the expression defined in the + /// top level filter parameter is used instead. + /// + /// Type of query. + /// Keeps track of any properties unknown to the library. + /// + /// The base 64 encoded binary of an image to be vectorized to perform a vector + /// search query. + /// + internal VectorizableImageBinaryQuery(int? kNearestNeighbors, string fields, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, VectorQueryKind kind, IDictionary serializedAdditionalRawData, string base64Image) : base(kNearestNeighbors, fields, exhaustive, oversampling, weight, threshold, filterOverride, kind, serializedAdditionalRawData) + { + Base64Image = base64Image; + } + + /// + /// The base 64 encoded binary of an image to be vectorized to perform a vector + /// search query. + /// + public string Base64Image { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageUrlQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageUrlQuery.Serialization.cs new file mode 100644 index 000000000000..2c8f9884b1ac --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageUrlQuery.Serialization.cs @@ -0,0 +1,209 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class VectorizableImageUrlQuery : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizableImageUrlQuery)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(Url)) + { + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url); + } + } + + VectorizableImageUrlQuery IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizableImageUrlQuery)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorizableImageUrlQuery(document.RootElement, options); + } + + internal static VectorizableImageUrlQuery DeserializeVectorizableImageUrlQuery(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string url = default; + int? k = default; + string fields = default; + bool? exhaustive = default; + double? oversampling = default; + float? weight = default; + VectorThreshold threshold = default; + string filterOverride = default; + VectorQueryKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = property.Value.GetString(); + continue; + } + if (property.NameEquals("k"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + k = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("fields"u8)) + { + fields = property.Value.GetString(); + continue; + } + if (property.NameEquals("exhaustive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exhaustive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("oversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + oversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("weight"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + weight = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("threshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + threshold = VectorThreshold.DeserializeVectorThreshold(property.Value, options); + continue; + } + if (property.NameEquals("filterOverride"u8)) + { + filterOverride = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorQueryKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorizableImageUrlQuery( + k, + fields, + exhaustive, + oversampling, + weight, + threshold, + filterOverride, + kind, + serializedAdditionalRawData, + url); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorizableImageUrlQuery)} does not support writing '{options.Format}' format."); + } + } + + VectorizableImageUrlQuery IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizableImageUrlQuery(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorizableImageUrlQuery)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new VectorizableImageUrlQuery FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizableImageUrlQuery(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageUrlQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageUrlQuery.cs new file mode 100644 index 000000000000..a552cc5d4a69 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableImageUrlQuery.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The query parameters to use for vector search when an url that represents an + /// image value that needs to be vectorized is provided. + /// + internal partial class VectorizableImageUrlQuery : VectorQuery + { + /// Initializes a new instance of . + public VectorizableImageUrlQuery() + { + Kind = VectorQueryKind.ImageUrl; + } + + /// Initializes a new instance of . + /// Number of nearest neighbors to return as top hits. + /// + /// Vector Fields of type Collection(Edm.Single) to be included in the vector + /// searched. + /// + /// + /// When true, triggers an exhaustive k-nearest neighbor search across all vectors + /// within the vector index. Useful for scenarios where exact matches are critical, + /// such as determining ground truth values. + /// + /// + /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' + /// parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' + /// is true. This parameter is only permitted when a + /// compression method is used on the underlying vector field. + /// + /// + /// Relative weight of the vector query when compared to other vector query and/or + /// the text query within the same search request. This value is used when + /// combining the results of multiple ranking lists produced by the different + /// vector queries and/or the results retrieved through the text query. The higher + /// the weight, the higher the documents that matched that query will be in the + /// final ranking. Default is 1.0 and the value needs to be a positive number + /// larger than zero. + /// + /// + /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. + /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The OData filter expression to apply to this specific vector query. If no + /// filter expression is defined at the vector level, the expression defined in the + /// top level filter parameter is used instead. + /// + /// Type of query. + /// Keeps track of any properties unknown to the library. + /// The URL of an image to be vectorized to perform a vector search query. + internal VectorizableImageUrlQuery(int? kNearestNeighbors, string fields, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, VectorQueryKind kind, IDictionary serializedAdditionalRawData, string url) : base(kNearestNeighbors, fields, exhaustive, oversampling, weight, threshold, filterOverride, kind, serializedAdditionalRawData) + { + Url = url; + } + + /// The URL of an image to be vectorized to perform a vector search query. + public string Url { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizableTextQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableTextQuery.Serialization.cs new file mode 100644 index 000000000000..d455a283f714 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableTextQuery.Serialization.cs @@ -0,0 +1,222 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class VectorizableTextQuery : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizableTextQuery)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + if (Optional.IsDefined(QueryRewrites)) + { + writer.WritePropertyName("queryRewrites"u8); + writer.WriteStringValue(QueryRewrites.Value.ToString()); + } + } + + VectorizableTextQuery IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizableTextQuery)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorizableTextQuery(document.RootElement, options); + } + + internal static VectorizableTextQuery DeserializeVectorizableTextQuery(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string text = default; + QueryRewritesType? queryRewrites = default; + int? k = default; + string fields = default; + bool? exhaustive = default; + double? oversampling = default; + float? weight = default; + VectorThreshold threshold = default; + string filterOverride = default; + VectorQueryKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + text = property.Value.GetString(); + continue; + } + if (property.NameEquals("queryRewrites"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + queryRewrites = new QueryRewritesType(property.Value.GetString()); + continue; + } + if (property.NameEquals("k"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + k = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("fields"u8)) + { + fields = property.Value.GetString(); + continue; + } + if (property.NameEquals("exhaustive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exhaustive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("oversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + oversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("weight"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + weight = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("threshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + threshold = VectorThreshold.DeserializeVectorThreshold(property.Value, options); + continue; + } + if (property.NameEquals("filterOverride"u8)) + { + filterOverride = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorQueryKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorizableTextQuery( + k, + fields, + exhaustive, + oversampling, + weight, + threshold, + filterOverride, + kind, + serializedAdditionalRawData, + text, + queryRewrites); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorizableTextQuery)} does not support writing '{options.Format}' format."); + } + } + + VectorizableTextQuery IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizableTextQuery(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorizableTextQuery)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new VectorizableTextQuery FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizableTextQuery(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizableTextQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableTextQuery.cs new file mode 100644 index 000000000000..c90a208908c9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizableTextQuery.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// The query parameters to use for vector search when a text value that needs to + /// be vectorized is provided. + /// + internal partial class VectorizableTextQuery : VectorQuery + { + /// Initializes a new instance of . + /// The text to be vectorized to perform a vector search query. + /// is null. + public VectorizableTextQuery(string text) + { + Argument.AssertNotNull(text, nameof(text)); + + Kind = VectorQueryKind.Text; + Text = text; + } + + /// Initializes a new instance of . + /// Number of nearest neighbors to return as top hits. + /// + /// Vector Fields of type Collection(Edm.Single) to be included in the vector + /// searched. + /// + /// + /// When true, triggers an exhaustive k-nearest neighbor search across all vectors + /// within the vector index. Useful for scenarios where exact matches are critical, + /// such as determining ground truth values. + /// + /// + /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' + /// parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' + /// is true. This parameter is only permitted when a + /// compression method is used on the underlying vector field. + /// + /// + /// Relative weight of the vector query when compared to other vector query and/or + /// the text query within the same search request. 
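// Illustrative sketch, not part of the generated code: the text variant requires the
// query text up front (the constructor null-checks it) and always emits "text" on the
// wire, while "queryRewrites" is written only when set. The strings below are arbitrary
// examples, and the call assumes internal access.
var textQuery = new VectorizableTextQuery("walking distance to coffee"); // constructor sets Kind to VectorQueryKind.Text
textQuery.QueryRewrites = new QueryRewritesType("generative");           // "generative" is an assumed wire value, not taken from this diff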
This value is used when + /// combining the results of multiple ranking lists produced by the different + /// vector queries and/or the results retrieved through the text query. The higher + /// the weight, the higher the documents that matched that query will be in the + /// final ranking. Default is 1.0 and the value needs to be a positive number + /// larger than zero. + /// + /// + /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The OData filter expression to apply to this specific vector query. If no + /// filter expression is defined at the vector level, the expression defined in the + /// top level filter parameter is used instead. + /// + /// Type of query. + /// Keeps track of any properties unknown to the library. + /// The text to be vectorized to perform a vector search query. + /// + /// Can be configured to let a generative model rewrite the query before sending it + /// to be vectorized. + /// + internal VectorizableTextQuery(int? kNearestNeighbors, string fields, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, VectorQueryKind kind, IDictionary serializedAdditionalRawData, string text, QueryRewritesType? queryRewrites) : base(kNearestNeighbors, fields, exhaustive, oversampling, weight, threshold, filterOverride, kind, serializedAdditionalRawData) + { + Text = text; + QueryRewrites = queryRewrites; + } + + /// Initializes a new instance of for deserialization. + internal VectorizableTextQuery() + { + } + + /// The text to be vectorized to perform a vector search query. + public string Text { get; set; } + /// + /// Can be configured to let a generative model rewrite the query before sending it + /// to be vectorized. + /// + public QueryRewritesType? QueryRewrites { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizedQuery.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizedQuery.Serialization.cs new file mode 100644 index 000000000000..86ece81141db --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizedQuery.Serialization.cs @@ -0,0 +1,216 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + internal partial class VectorizedQuery : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizedQuery)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("vector"u8); + writer.WriteStartArray(); + foreach (var item in Vector) + { + writer.WriteNumberValue(item); + } + writer.WriteEndArray(); + } + + VectorizedQuery IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorizedQuery)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorizedQuery(document.RootElement, options); + } + + internal static VectorizedQuery DeserializeVectorizedQuery(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList vector = default; + int? k = default; + string fields = default; + bool? exhaustive = default; + double? oversampling = default; + float? weight = default; + VectorThreshold threshold = default; + string filterOverride = default; + VectorQueryKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("vector"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetSingle()); + } + vector = array; + continue; + } + if (property.NameEquals("k"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + k = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("fields"u8)) + { + fields = property.Value.GetString(); + continue; + } + if (property.NameEquals("exhaustive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exhaustive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("oversampling"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + oversampling = property.Value.GetDouble(); + continue; + } + if (property.NameEquals("weight"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + weight = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("threshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + threshold = VectorThreshold.DeserializeVectorThreshold(property.Value, options); + continue; + } + if (property.NameEquals("filterOverride"u8)) + { + filterOverride = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorQueryKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorizedQuery( + k, + fields, + exhaustive, + oversampling, + weight, + threshold, + filterOverride, + kind, + serializedAdditionalRawData, + vector); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorizedQuery)} does not support writing '{options.Format}' format."); + } + } + + VectorizedQuery IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizedQuery(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorizedQuery)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new VectorizedQuery FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorizedQuery(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorizedQuery.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorizedQuery.cs new file mode 100644 index 000000000000..694251a82586 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorizedQuery.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.Search.Documents +{ + /// + /// The query parameters to use for vector search when a raw vector value is + /// provided. + /// + internal partial class VectorizedQuery : VectorQuery + { + /// Initializes a new instance of . + /// The vector representation of a search query. + /// is null. + public VectorizedQuery(IEnumerable vector) + { + Argument.AssertNotNull(vector, nameof(vector)); + + Kind = VectorQueryKind.Vector; + Vector = vector.ToList(); + } + + /// Initializes a new instance of . + /// Number of nearest neighbors to return as top hits. + /// + /// Vector Fields of type Collection(Edm.Single) to be included in the vector + /// searched. + /// + /// + /// When true, triggers an exhaustive k-nearest neighbor search across all vectors + /// within the vector index. Useful for scenarios where exact matches are critical, + /// such as determining ground truth values. + /// + /// + /// Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' + /// parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' + /// is true. This parameter is only permitted when a + /// compression method is used on the underlying vector field. + /// + /// + /// Relative weight of the vector query when compared to other vector query and/or + /// the text query within the same search request. 
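// Illustrative sketch, not part of the generated code: the raw-vector variant copies the
// incoming values into its read-only Vector list and tags itself with
// VectorQueryKind.Vector. The three floats are arbitrary placeholders for a real
// embedding, and the call assumes internal access.
var vectorQuery = new VectorizedQuery(new float[] { 0.1f, 0.2f, 0.3f });
// vectorQuery.Vector now holds the copied values; the serializer above writes them as
// the "vector" JSON array alongside the shared VectorQuery properties.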
This value is used when + /// combining the results of multiple ranking lists produced by the different + /// vector queries and/or the results retrieved through the text query. The higher + /// the weight, the higher the documents that matched that query will be in the + /// final ranking. Default is 1.0 and the value needs to be a positive number + /// larger than zero. + /// + /// + /// The threshold used for vector queries. Note this can only be set if all 'fields' use the same similarity metric. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The OData filter expression to apply to this specific vector query. If no + /// filter expression is defined at the vector level, the expression defined in the + /// top level filter parameter is used instead. + /// + /// Type of query. + /// Keeps track of any properties unknown to the library. + /// The vector representation of a search query. + internal VectorizedQuery(int? kNearestNeighbors, string fields, bool? exhaustive, double? oversampling, float? weight, VectorThreshold threshold, string filterOverride, VectorQueryKind kind, IDictionary serializedAdditionalRawData, IList vector) : base(kNearestNeighbors, fields, exhaustive, oversampling, weight, threshold, filterOverride, kind, serializedAdditionalRawData) + { + Vector = vector; + } + + /// Initializes a new instance of for deserialization. + internal VectorizedQuery() + { + } + + /// The vector representation of a search query. + public IList Vector { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorsDebugInfo.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorsDebugInfo.Serialization.cs new file mode 100644 index 000000000000..c3c1a4296fc8 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorsDebugInfo.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class VectorsDebugInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorsDebugInfo)} does not support writing '{format}' format."); + } + + if (options.Format != "W" && Optional.IsDefined(Subscores)) + { + writer.WritePropertyName("subscores"u8); + writer.WriteObjectValue(Subscores, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + VectorsDebugInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VectorsDebugInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVectorsDebugInfo(document.RootElement, options); + } + + internal static VectorsDebugInfo DeserializeVectorsDebugInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + QueryResultDocumentSubscores subscores = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("subscores"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + subscores = QueryResultDocumentSubscores.DeserializeQueryResultDocumentSubscores(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VectorsDebugInfo(subscores, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VectorsDebugInfo)} does not support writing '{options.Format}' format."); + } + } + + VectorsDebugInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorsDebugInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VectorsDebugInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static VectorsDebugInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVectorsDebugInfo(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VectorsDebugInfo.cs b/sdk/search/Azure.Search.Documents/src/Generated/VectorsDebugInfo.cs new file mode 100644 index 000000000000..95f894f41b4a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VectorsDebugInfo.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Contains debugging information specific to vector and hybrid search. + public partial class VectorsDebugInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal VectorsDebugInfo() + { + } + + /// Initializes a new instance of . + /// + /// The breakdown of subscores of the document prior to the chosen result set + /// fusion/combination method such as RRF. + /// + /// Keeps track of any properties unknown to the library. + internal VectorsDebugInfo(QueryResultDocumentSubscores subscores, IDictionary serializedAdditionalRawData) + { + Subscores = subscores; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// + /// The breakdown of subscores of the document prior to the chosen result set + /// fusion/combination method such as RRF. + /// + public QueryResultDocumentSubscores Subscores { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VisionVectorizeSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/VisionVectorizeSkill.Serialization.cs new file mode 100644 index 000000000000..9fe3eb0d7173 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VisionVectorizeSkill.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
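A minimal consumption sketch for the read-only `VectorsDebugInfo` model above. It uses only the members shown in this diff (`Subscores` and `QueryResultDocumentSubscores`); how a `VectorsDebugInfo` instance is obtained from a search response is not part of this diff, so the sketch takes it as a parameter, and the `VectorsDebugInfoSample` class name is illustrative only.

```csharp
using System;
using Azure.Search.Documents; // namespace used by this generated code

internal static class VectorsDebugInfoSample
{
    public static void PrintSubscores(VectorsDebugInfo debugInfo)
    {
        // Subscores is the per-query breakdown captured before the chosen
        // fusion method (such as RRF) combines the ranked lists.
        QueryResultDocumentSubscores subscores = debugInfo?.Subscores;
        if (subscores is null)
        {
            Console.WriteLine("No vector subscores were returned for this document.");
            return;
        }

        Console.WriteLine("Vector subscores are present; inspect them per vector query as needed.");
    }
}
```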
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class VisionVectorizeSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VisionVectorizeSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("modelVersion"u8); + writer.WriteStringValue(ModelVersion); + } + + VisionVectorizeSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VisionVectorizeSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVisionVectorizeSkill(document.RootElement, options); + } + + internal static VisionVectorizeSkill DeserializeVisionVectorizeSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string modelVersion = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("modelVersion"u8)) + { + modelVersion = property.Value.GetString(); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + 
return new VisionVectorizeSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + modelVersion); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VisionVectorizeSkill)} does not support writing '{options.Format}' format."); + } + } + + VisionVectorizeSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVisionVectorizeSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VisionVectorizeSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new VisionVectorizeSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeVisionVectorizeSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/VisionVectorizeSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/VisionVectorizeSkill.cs new file mode 100644 index 000000000000..49dcbb965875 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/VisionVectorizeSkill.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Allows you to generate a vector embedding for a given image or text input using + /// the Azure AI Services Vision Vectorize API. + /// + public partial class VisionVectorizeSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// + /// The version of the model to use when calling the AI Services Vision service. It + /// will default to the latest available when not specified. + /// + /// , or is null. + public VisionVectorizeSkill(IEnumerable inputs, IEnumerable outputs, string modelVersion) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + Argument.AssertNotNull(modelVersion, nameof(modelVersion)); + + OdataType = "#Microsoft.Skills.Vision.VectorizeSkill"; + ModelVersion = modelVersion; + } + + /// Initializes a new instance of . 
+ /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. + /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// + /// The version of the model to use when calling the AI Services Vision service. It + /// will default to the latest available when not specified. + /// + internal VisionVectorizeSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string modelVersion) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + ModelVersion = modelVersion; + } + + /// Initializes a new instance of for deserialization. + internal VisionVectorizeSkill() + { + } + + /// + /// The version of the model to use when calling the AI Services Vision service. It + /// will default to the latest available when not specified. + /// + public string ModelVersion { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VisualFeature.cs b/sdk/search/Azure.Search.Documents/src/Generated/VisualFeature.cs similarity index 98% rename from sdk/search/Azure.Search.Documents/src/Generated/Models/VisualFeature.cs rename to sdk/search/Azure.Search.Documents/src/Generated/VisualFeature.cs index 796ec7f21b8c..271b696c1a8b 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VisualFeature.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/VisualFeature.cs @@ -8,7 +8,7 @@ using System; using System.ComponentModel; -namespace Azure.Search.Documents.Indexes.Models +namespace Azure.Search.Documents { /// The strings indicating what visual feature types to return. public readonly partial struct VisualFeature : IEquatable diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WebApiSkill.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/WebApiSkill.Serialization.cs new file mode 100644 index 000000000000..5cf899105872 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WebApiSkill.Serialization.cs @@ -0,0 +1,297 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
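A minimal usage sketch for the new `VisionVectorizeSkill` above. The constructor shape and the settable `ModelVersion` come from this diff; the `InputFieldMappingEntry`/`OutputFieldMappingEntry` initializers are assumed from the existing skillset surface, and the field paths and the "2023-04-15" model version are placeholders to be checked against the AI Services Vision documentation.

```csharp
var visionSkill = new VisionVectorizeSkill(
    inputs: new[]
    {
        // Placeholder source path; adjust to where normalized images live in your enrichment tree.
        new InputFieldMappingEntry("image") { Source = "/document/normalized_images/*" }
    },
    outputs: new[]
    {
        new OutputFieldMappingEntry("vector") { TargetName = "imageVector" }
    },
    modelVersion: "2023-04-15") // placeholder version; the service defaults to the latest when unspecified
{
    Name = "vision-vectorize-skill",
    Context = "/document/normalized_images/*",
    Description = "Generates an embedding for each normalized image."
};
```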
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class WebApiSkill : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WebApiSkill)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("uri"u8); + writer.WriteStringValue(Uri); + if (Optional.IsCollectionDefined(HttpHeaders)) + { + writer.WritePropertyName("httpHeaders"u8); + writer.WriteStartObject(); + foreach (var item in HttpHeaders) + { + writer.WritePropertyName(item.Key); + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + if (Optional.IsDefined(HttpMethod)) + { + writer.WritePropertyName("httpMethod"u8); + writer.WriteStringValue(HttpMethod); + } + if (Optional.IsDefined(Timeout)) + { + writer.WritePropertyName("timeout"u8); + writer.WriteStringValue(Timeout.Value, "P"); + } + if (Optional.IsDefined(BatchSize)) + { + writer.WritePropertyName("batchSize"u8); + writer.WriteNumberValue(BatchSize.Value); + } + if (Optional.IsDefined(DegreeOfParallelism)) + { + writer.WritePropertyName("degreeOfParallelism"u8); + writer.WriteNumberValue(DegreeOfParallelism.Value); + } + if (Optional.IsDefined(AuthResourceId)) + { + writer.WritePropertyName("authResourceId"u8); + writer.WriteStringValue(AuthResourceId); + } + if (Optional.IsDefined(AuthIdentity)) + { + writer.WritePropertyName("authIdentity"u8); + writer.WriteObjectValue(AuthIdentity, options); + } + } + + WebApiSkill IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WebApiSkill)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeWebApiSkill(document.RootElement, options); + } + + internal static WebApiSkill DeserializeWebApiSkill(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string uri = default; + IDictionary httpHeaders = default; + string httpMethod = default; + TimeSpan? timeout = default; + int? batchSize = default; + int? 
degreeOfParallelism = default; + string authResourceId = default; + SearchIndexerDataIdentity authIdentity = default; + string odataType = default; + string name = default; + string description = default; + string context = default; + IList inputs = default; + IList outputs = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("uri"u8)) + { + uri = property.Value.GetString(); + continue; + } + if (property.NameEquals("httpHeaders"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetString()); + } + httpHeaders = dictionary; + continue; + } + if (property.NameEquals("httpMethod"u8)) + { + httpMethod = property.Value.GetString(); + continue; + } + if (property.NameEquals("timeout"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + timeout = property.Value.GetTimeSpan("P"); + continue; + } + if (property.NameEquals("batchSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + batchSize = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("degreeOfParallelism"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + degreeOfParallelism = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("authResourceId"u8)) + { + authResourceId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authIdentity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("context"u8)) + { + context = property.Value.GetString(); + continue; + } + if (property.NameEquals("inputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item, options)); + } + inputs = array; + continue; + } + if (property.NameEquals("outputs"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item, options)); + } + outputs = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new WebApiSkill( + odataType, + name, + description, + context, + inputs, + outputs, + serializedAdditionalRawData, + uri, + httpHeaders ?? new ChangeTrackingDictionary(), + httpMethod, + timeout, + batchSize, + degreeOfParallelism, + authResourceId, + authIdentity); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(WebApiSkill)} does not support writing '{options.Format}' format."); + } + } + + WebApiSkill IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWebApiSkill(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(WebApiSkill)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new WebApiSkill FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWebApiSkill(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WebApiSkill.cs b/sdk/search/Azure.Search.Documents/src/Generated/WebApiSkill.cs new file mode 100644 index 000000000000..e30a17982f0e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WebApiSkill.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// A skill that can call a Web API endpoint, allowing you to extend a skillset by + /// having it call your custom code. + /// + public partial class WebApiSkill : SearchIndexerSkill + { + /// Initializes a new instance of . + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// The url for the Web API. + /// , or is null. + public WebApiSkill(IEnumerable inputs, IEnumerable outputs, string uri) : base(inputs, outputs) + { + Argument.AssertNotNull(inputs, nameof(inputs)); + Argument.AssertNotNull(outputs, nameof(outputs)); + Argument.AssertNotNull(uri, nameof(uri)); + + OdataType = "#Microsoft.Skills.Custom.WebApiSkill"; + Uri = uri; + HttpHeaders = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the skill which uniquely identifies it within the skillset. A skill + /// with no name defined will be given a default name of its 1-based index in the + /// skills array, prefixed with the character '#'. + /// + /// + /// The description of the skill which describes the inputs, outputs, and usage of + /// the skill. 
+ /// + /// + /// Represents the level at which operations take place, such as the document root + /// or document content (for example, /document or /document/content). The default + /// is /document. + /// + /// + /// Inputs of the skills could be a column in the source data set, or the output of + /// an upstream skill. + /// + /// + /// The output of a skill is either a field in a search index, or a value that can + /// be consumed as an input by another skill. + /// + /// Keeps track of any properties unknown to the library. + /// The url for the Web API. + /// The headers required to make the http request. + /// The method for the http request. + /// The desired timeout for the request. Default is 30 seconds. + /// The desired batch size which indicates number of documents. + /// If set, the number of parallel calls that can be made to the Web API. + /// + /// Applies to custom skills that connect to external code in an Azure function or + /// some other application that provides the transformations. This value should be + /// the application ID created for the function or app when it was registered with + /// Azure Active Directory. When specified, the custom skill connects to the + /// function or app using a managed ID (either system or user-assigned) of the + /// search service and the access token of the function or app, using this value as + /// the resource id for creating the scope of the access token. + /// + /// + /// The user-assigned managed identity used for outbound connections. If an + /// authResourceId is provided and it's not specified, the system-assigned managed + /// identity is used. On updates to the indexer, if the identity is unspecified, + /// the value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + internal WebApiSkill(string odataType, string name, string description, string context, IList inputs, IList outputs, IDictionary serializedAdditionalRawData, string uri, IDictionary httpHeaders, string httpMethod, TimeSpan? timeout, int? batchSize, int? degreeOfParallelism, string authResourceId, SearchIndexerDataIdentity authIdentity) : base(odataType, name, description, context, inputs, outputs, serializedAdditionalRawData) + { + Uri = uri; + HttpHeaders = httpHeaders; + HttpMethod = httpMethod; + Timeout = timeout; + BatchSize = batchSize; + DegreeOfParallelism = degreeOfParallelism; + AuthResourceId = authResourceId; + AuthIdentity = authIdentity; + } + + /// Initializes a new instance of for deserialization. + internal WebApiSkill() + { + } + + /// The url for the Web API. + public string Uri { get; set; } + /// The headers required to make the http request. + public IDictionary HttpHeaders { get; } + /// The method for the http request. + public string HttpMethod { get; set; } + /// The desired timeout for the request. Default is 30 seconds. + public TimeSpan? Timeout { get; set; } + /// The desired batch size which indicates number of documents. + public int? BatchSize { get; set; } + /// If set, the number of parallel calls that can be made to the Web API. + public int? DegreeOfParallelism { get; set; } + /// + /// Applies to custom skills that connect to external code in an Azure function or + /// some other application that provides the transformations. 
This value should be + /// the application ID created for the function or app when it was registered with + /// Azure Active Directory. When specified, the custom skill connects to the + /// function or app using a managed ID (either system or user-assigned) of the + /// search service and the access token of the function or app, using this value as + /// the resource id for creating the scope of the access token. + /// + public string AuthResourceId { get; set; } + /// + /// The user-assigned managed identity used for outbound connections. If an + /// authResourceId is provided and it's not specified, the system-assigned managed + /// identity is used. On updates to the indexer, if the identity is unspecified, + /// the value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity AuthIdentity { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizer.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizer.Serialization.cs new file mode 100644 index 000000000000..186ead3cb452 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizer.Serialization.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class WebApiVectorizer : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WebApiVectorizer)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(WebApiParameters)) + { + writer.WritePropertyName("customWebApiParameters"u8); + writer.WriteObjectValue(WebApiParameters, options); + } + } + + WebApiVectorizer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
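A minimal usage sketch for the `WebApiSkill` above, exercising the optional knobs the model exposes (`HttpMethod`, `Timeout`, `BatchSize`, `DegreeOfParallelism`, `HttpHeaders`). The endpoint URI, field names, and header key are placeholders, and the input/output entry initializers are assumed from the existing skillset surface.

```csharp
var webApiSkill = new WebApiSkill(
    inputs: new[] { new InputFieldMappingEntry("text") { Source = "/document/content" } },
    outputs: new[] { new OutputFieldMappingEntry("customEntities") { TargetName = "customEntities" } },
    uri: "https://example.com/api/enrich") // placeholder endpoint
{
    HttpMethod = "POST",
    Timeout = TimeSpan.FromSeconds(90),   // service default is 30 seconds
    BatchSize = 10,                       // documents sent per request
    DegreeOfParallelism = 2               // parallel calls allowed to the Web API
};

// HttpHeaders is a get-only property, but the dictionary itself is mutable.
webApiSkill.HttpHeaders["x-functions-key"] = "<placeholder-key>";
```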
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WebApiVectorizer)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeWebApiVectorizer(document.RootElement, options); + } + + internal static WebApiVectorizer DeserializeWebApiVectorizer(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + WebApiVectorizerParameters customWebApiParameters = default; + string name = default; + VectorSearchVectorizerKind kind = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("customWebApiParameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + customWebApiParameters = WebApiVectorizerParameters.DeserializeWebApiVectorizerParameters(property.Value, options); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("kind"u8)) + { + kind = new VectorSearchVectorizerKind(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new WebApiVectorizer(name, kind, serializedAdditionalRawData, customWebApiParameters); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(WebApiVectorizer)} does not support writing '{options.Format}' format."); + } + } + + WebApiVectorizer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWebApiVectorizer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(WebApiVectorizer)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new WebApiVectorizer FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWebApiVectorizer(document.RootElement); + } + + /// Convert into a . 
+ internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizer.cs b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizer.cs new file mode 100644 index 000000000000..6e0606a878c9 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizer.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Specifies a user-defined vectorizer for generating the vector embedding of a + /// query string. Integration of an external vectorizer is achieved using the + /// custom Web API interface of a skillset. + /// + public partial class WebApiVectorizer : VectorSearchVectorizer + { + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// is null. + public WebApiVectorizer(string vectorizerName) : base(vectorizerName) + { + Argument.AssertNotNull(vectorizerName, nameof(vectorizerName)); + + Kind = VectorSearchVectorizerKind.CustomWebApi; + } + + /// Initializes a new instance of . + /// The name to associate with this particular vectorization method. + /// Type of VectorSearchVectorizer. + /// Keeps track of any properties unknown to the library. + /// Specifies the properties of the user-defined vectorizer. + internal WebApiVectorizer(string vectorizerName, VectorSearchVectorizerKind kind, IDictionary serializedAdditionalRawData, WebApiVectorizerParameters webApiParameters) : base(vectorizerName, kind, serializedAdditionalRawData) + { + WebApiParameters = webApiParameters; + } + + /// Initializes a new instance of for deserialization. + internal WebApiVectorizer() + { + } + + /// Specifies the properties of the user-defined vectorizer. + public WebApiVectorizerParameters WebApiParameters { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizerParameters.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizerParameters.Serialization.cs new file mode 100644 index 000000000000..c16f794669a5 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizerParameters.Serialization.cs @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class WebApiVectorizerParameters : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WebApiVectorizerParameters)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Url)) + { + writer.WritePropertyName("uri"u8); + writer.WriteStringValue(Url.AbsoluteUri); + } + if (Optional.IsCollectionDefined(HttpHeaders)) + { + writer.WritePropertyName("httpHeaders"u8); + writer.WriteStartObject(); + foreach (var item in HttpHeaders) + { + writer.WritePropertyName(item.Key); + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + if (Optional.IsDefined(HttpMethod)) + { + writer.WritePropertyName("httpMethod"u8); + writer.WriteStringValue(HttpMethod); + } + if (Optional.IsDefined(Timeout)) + { + writer.WritePropertyName("timeout"u8); + writer.WriteStringValue(Timeout.Value, "P"); + } + if (Optional.IsDefined(AuthResourceId)) + { + writer.WritePropertyName("authResourceId"u8); + writer.WriteStringValue(AuthResourceId); + } + if (Optional.IsDefined(AuthIdentity)) + { + writer.WritePropertyName("authIdentity"u8); + writer.WriteObjectValue(AuthIdentity, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + WebApiVectorizerParameters IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WebApiVectorizerParameters)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeWebApiVectorizerParameters(document.RootElement, options); + } + + internal static WebApiVectorizerParameters DeserializeWebApiVectorizerParameters(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri uri = default; + IDictionary httpHeaders = default; + string httpMethod = default; + TimeSpan? 
timeout = default; + string authResourceId = default; + SearchIndexerDataIdentity authIdentity = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("uri"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + uri = new Uri(property.Value.GetString()); + continue; + } + if (property.NameEquals("httpHeaders"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, property0.Value.GetString()); + } + httpHeaders = dictionary; + continue; + } + if (property.NameEquals("httpMethod"u8)) + { + httpMethod = property.Value.GetString(); + continue; + } + if (property.NameEquals("timeout"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + timeout = property.Value.GetTimeSpan("P"); + continue; + } + if (property.NameEquals("authResourceId"u8)) + { + authResourceId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authIdentity"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authIdentity = SearchIndexerDataIdentity.DeserializeSearchIndexerDataIdentity(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new WebApiVectorizerParameters( + uri, + httpHeaders ?? new ChangeTrackingDictionary(), + httpMethod, + timeout, + authResourceId, + authIdentity, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(WebApiVectorizerParameters)} does not support writing '{options.Format}' format."); + } + } + + WebApiVectorizerParameters IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWebApiVectorizerParameters(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(WebApiVectorizerParameters)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static WebApiVectorizerParameters FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWebApiVectorizerParameters(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizerParameters.cs b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizerParameters.cs new file mode 100644 index 000000000000..bfdaf383132a --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WebApiVectorizerParameters.cs @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// Specifies the properties for connecting to a user-defined vectorizer. + public partial class WebApiVectorizerParameters + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public WebApiVectorizerParameters() + { + HttpHeaders = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The URI of the Web API providing the vectorizer. + /// The headers required to make the HTTP request. + /// The method for the HTTP request. + /// The desired timeout for the request. Default is 30 seconds. + /// + /// Applies to custom endpoints that connect to external code in an Azure function + /// or some other application that provides the transformations. This value should + /// be the application ID created for the function or app when it was registered + /// with Azure Active Directory. When specified, the vectorization connects to the + /// function or app using a managed ID (either system or user-assigned) of the + /// search service and the access token of the function or app, using this value as + /// the resource id for creating the scope of the access token. + /// + /// + /// The user-assigned managed identity used for outbound connections. If an + /// authResourceId is provided and it's not specified, the system-assigned managed + /// identity is used. On updates to the indexer, if the identity is unspecified, + /// the value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// Keeps track of any properties unknown to the library. + internal WebApiVectorizerParameters(Uri url, IDictionary httpHeaders, string httpMethod, TimeSpan? 
timeout, string authResourceId, SearchIndexerDataIdentity authIdentity, IDictionary serializedAdditionalRawData) + { + Url = url; + HttpHeaders = httpHeaders; + HttpMethod = httpMethod; + Timeout = timeout; + AuthResourceId = authResourceId; + AuthIdentity = authIdentity; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The URI of the Web API providing the vectorizer. + public Uri Url { get; set; } + /// The headers required to make the HTTP request. + public IDictionary HttpHeaders { get; } + /// The method for the HTTP request. + public string HttpMethod { get; set; } + /// The desired timeout for the request. Default is 30 seconds. + public TimeSpan? Timeout { get; set; } + /// + /// Applies to custom endpoints that connect to external code in an Azure function + /// or some other application that provides the transformations. This value should + /// be the application ID created for the function or app when it was registered + /// with Azure Active Directory. When specified, the vectorization connects to the + /// function or app using a managed ID (either system or user-assigned) of the + /// search service and the access token of the function or app, using this value as + /// the resource id for creating the scope of the access token. + /// + public string AuthResourceId { get; set; } + /// + /// The user-assigned managed identity used for outbound connections. If an + /// authResourceId is provided and it's not specified, the system-assigned managed + /// identity is used. On updates to the indexer, if the identity is unspecified, + /// the value remains unchanged. If set to "none", the value of this property is + /// cleared. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + public SearchIndexerDataIdentity AuthIdentity { get; set; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WordDelimiterTokenFilter.Serialization.cs b/sdk/search/Azure.Search.Documents/src/Generated/WordDelimiterTokenFilter.Serialization.cs new file mode 100644 index 000000000000..f2157b454712 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WordDelimiterTokenFilter.Serialization.cs @@ -0,0 +1,305 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Search.Documents +{ + public partial class WordDelimiterTokenFilter : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
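A minimal sketch combining the `WebApiVectorizer` and `WebApiVectorizerParameters` models above: a query-time vectorizer that forwards query text to a user-defined Web API. Only the constructor and properties shown in this diff are used; the endpoint and `AuthResourceId` values are placeholders.

```csharp
var vectorizer = new WebApiVectorizer("my-custom-vectorizer")
{
    WebApiParameters = new WebApiVectorizerParameters
    {
        Url = new Uri("https://example.com/api/vectorize"),            // placeholder endpoint
        HttpMethod = "POST",
        Timeout = TimeSpan.FromSeconds(60),                            // default is 30 seconds
        AuthResourceId = "api://00000000-0000-0000-0000-000000000000"  // placeholder application ID
    }
};
```

In an index definition the vectorizer would then be referenced by name from a vector search profile, but that wiring is outside this diff.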
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WordDelimiterTokenFilter)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(GenerateWordParts)) + { + writer.WritePropertyName("generateWordParts"u8); + writer.WriteBooleanValue(GenerateWordParts.Value); + } + if (Optional.IsDefined(GenerateNumberParts)) + { + writer.WritePropertyName("generateNumberParts"u8); + writer.WriteBooleanValue(GenerateNumberParts.Value); + } + if (Optional.IsDefined(CatenateWords)) + { + writer.WritePropertyName("catenateWords"u8); + writer.WriteBooleanValue(CatenateWords.Value); + } + if (Optional.IsDefined(CatenateNumbers)) + { + writer.WritePropertyName("catenateNumbers"u8); + writer.WriteBooleanValue(CatenateNumbers.Value); + } + if (Optional.IsDefined(CatenateAll)) + { + writer.WritePropertyName("catenateAll"u8); + writer.WriteBooleanValue(CatenateAll.Value); + } + if (Optional.IsDefined(SplitOnCaseChange)) + { + writer.WritePropertyName("splitOnCaseChange"u8); + writer.WriteBooleanValue(SplitOnCaseChange.Value); + } + if (Optional.IsDefined(PreserveOriginal)) + { + writer.WritePropertyName("preserveOriginal"u8); + writer.WriteBooleanValue(PreserveOriginal.Value); + } + if (Optional.IsDefined(SplitOnNumerics)) + { + writer.WritePropertyName("splitOnNumerics"u8); + writer.WriteBooleanValue(SplitOnNumerics.Value); + } + if (Optional.IsDefined(StemEnglishPossessive)) + { + writer.WritePropertyName("stemEnglishPossessive"u8); + writer.WriteBooleanValue(StemEnglishPossessive.Value); + } + if (Optional.IsCollectionDefined(ProtectedWords)) + { + writer.WritePropertyName("protectedWords"u8); + writer.WriteStartArray(); + foreach (var item in ProtectedWords) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + } + + WordDelimiterTokenFilter IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(WordDelimiterTokenFilter)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeWordDelimiterTokenFilter(document.RootElement, options); + } + + internal static WordDelimiterTokenFilter DeserializeWordDelimiterTokenFilter(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool? generateWordParts = default; + bool? generateNumberParts = default; + bool? catenateWords = default; + bool? catenateNumbers = default; + bool? catenateAll = default; + bool? splitOnCaseChange = default; + bool? preserveOriginal = default; + bool? splitOnNumerics = default; + bool? 
stemEnglishPossessive = default; + IList protectedWords = default; + string odataType = default; + string name = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("generateWordParts"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + generateWordParts = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("generateNumberParts"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + generateNumberParts = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("catenateWords"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + catenateWords = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("catenateNumbers"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + catenateNumbers = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("catenateAll"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + catenateAll = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("splitOnCaseChange"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + splitOnCaseChange = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("preserveOriginal"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + preserveOriginal = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("splitOnNumerics"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + splitOnNumerics = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("stemEnglishPossessive"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + stemEnglishPossessive = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("protectedWords"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + protectedWords = array; + continue; + } + if (property.NameEquals("@odata.type"u8)) + { + odataType = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new WordDelimiterTokenFilter( + odataType, + name, + serializedAdditionalRawData, + generateWordParts, + generateNumberParts, + catenateWords, + catenateNumbers, + catenateAll, + splitOnCaseChange, + preserveOriginal, + splitOnNumerics, + stemEnglishPossessive, + protectedWords ?? new ChangeTrackingList()); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(WordDelimiterTokenFilter)} does not support writing '{options.Format}' format."); + } + } + + WordDelimiterTokenFilter IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWordDelimiterTokenFilter(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(WordDelimiterTokenFilter)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static new WordDelimiterTokenFilter FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeWordDelimiterTokenFilter(document.RootElement); + } + + /// Convert into a . + internal override RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Generated/WordDelimiterTokenFilter.cs b/sdk/search/Azure.Search.Documents/src/Generated/WordDelimiterTokenFilter.cs new file mode 100644 index 000000000000..91b284819d3e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Generated/WordDelimiterTokenFilter.cs @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.Search.Documents +{ + /// + /// Splits words into subwords and performs optional transformations on subword + /// groups. This token filter is implemented using Apache Lucene. + /// + public partial class WordDelimiterTokenFilter : TokenFilter + { + /// Initializes a new instance of . + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// is null. + public WordDelimiterTokenFilter(string name) : base(name) + { + Argument.AssertNotNull(name, nameof(name)); + + OdataType = "#Microsoft.Azure.Search.WordDelimiterTokenFilter"; + ProtectedWords = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The discriminator for derived types. + /// + /// The name of the token filter. It must only contain letters, digits, spaces, + /// dashes or underscores, can only start and end with alphanumeric characters, and + /// is limited to 128 characters. + /// + /// Keeps track of any properties unknown to the library. + /// + /// A value indicating whether to generate part words. If set, causes parts of + /// words to be generated; for example "AzureSearch" becomes "Azure" "Search". + /// Default is true. + /// + /// A value indicating whether to generate number subwords. Default is true. 
+ /// + /// A value indicating whether maximum runs of word parts will be catenated. For + /// example, if this is set to true, "Azure-Search" becomes "AzureSearch". Default + /// is false. + /// + /// + /// A value indicating whether maximum runs of number parts will be catenated. For + /// example, if this is set to true, "1-2" becomes "12". Default is false. + /// + /// + /// A value indicating whether all subword parts will be catenated. For example, if + /// this is set to true, "Azure-Search-1" becomes "AzureSearch1". Default is false. + /// + /// + /// A value indicating whether to split words on caseChange. For example, if this + /// is set to true, "AzureSearch" becomes "Azure" "Search". Default is true. + /// + /// + /// A value indicating whether original words will be preserved and added to the + /// subword list. Default is false. + /// + /// + /// A value indicating whether to split on numbers. For example, if this is set to + /// true, "Azure1Search" becomes "Azure" "1" "Search". Default is true. + /// + /// + /// A value indicating whether to remove trailing "'s" for each subword. Default is + /// true. + /// + /// A list of tokens to protect from being delimited. + internal WordDelimiterTokenFilter(string odataType, string name, IDictionary serializedAdditionalRawData, bool? generateWordParts, bool? generateNumberParts, bool? catenateWords, bool? catenateNumbers, bool? catenateAll, bool? splitOnCaseChange, bool? preserveOriginal, bool? splitOnNumerics, bool? stemEnglishPossessive, IList protectedWords) : base(odataType, name, serializedAdditionalRawData) + { + GenerateWordParts = generateWordParts; + GenerateNumberParts = generateNumberParts; + CatenateWords = catenateWords; + CatenateNumbers = catenateNumbers; + CatenateAll = catenateAll; + SplitOnCaseChange = splitOnCaseChange; + PreserveOriginal = preserveOriginal; + SplitOnNumerics = splitOnNumerics; + StemEnglishPossessive = stemEnglishPossessive; + ProtectedWords = protectedWords; + } + + /// Initializes a new instance of for deserialization. + internal WordDelimiterTokenFilter() + { + } + + /// + /// A value indicating whether to generate part words. If set, causes parts of + /// words to be generated; for example "AzureSearch" becomes "Azure" "Search". + /// Default is true. + /// + public bool? GenerateWordParts { get; set; } + /// A value indicating whether to generate number subwords. Default is true. + public bool? GenerateNumberParts { get; set; } + /// + /// A value indicating whether maximum runs of word parts will be catenated. For + /// example, if this is set to true, "Azure-Search" becomes "AzureSearch". Default + /// is false. + /// + public bool? CatenateWords { get; set; } + /// + /// A value indicating whether maximum runs of number parts will be catenated. For + /// example, if this is set to true, "1-2" becomes "12". Default is false. + /// + public bool? CatenateNumbers { get; set; } + /// + /// A value indicating whether all subword parts will be catenated. For example, if + /// this is set to true, "Azure-Search-1" becomes "AzureSearch1". Default is false. + /// + public bool? CatenateAll { get; set; } + /// + /// A value indicating whether to split words on caseChange. For example, if this + /// is set to true, "AzureSearch" becomes "Azure" "Search". Default is true. + /// + public bool? SplitOnCaseChange { get; set; } + /// + /// A value indicating whether original words will be preserved and added to the + /// subword list. Default is false. + /// + public bool? 
PreserveOriginal { get; set; } + /// + /// A value indicating whether to split on numbers. For example, if this is set to + /// true, "Azure1Search" becomes "Azure" "1" "Search". Default is true. + /// + public bool? SplitOnNumerics { get; set; } + /// + /// A value indicating whether to remove trailing "'s" for each subword. Default is + /// true. + /// + public bool? StemEnglishPossessive { get; set; } + /// A list of tokens to protect from being delimited. + public IList ProtectedWords { get; } + } +} diff --git a/sdk/search/Azure.Search.Documents/src/autorest.md b/sdk/search/Azure.Search.Documents/src/autorest.md deleted file mode 100644 index 794d15c138df..000000000000 --- a/sdk/search/Azure.Search.Documents/src/autorest.md +++ /dev/null @@ -1,561 +0,0 @@ -# Azure.Search.Documents Code Generation - -Run `dotnet build /t:GenerateCode` in the `src` directory to generate SDK code. - -See the [Contributing guidelines](https://github.com/Azure/azure-sdk-for-net/blob/fe0bf0e7e84a406ec2102c194ea05ccd5011a141/sdk/search/CONTRIBUTING.md) for more details. - -## AutoRest Configuration -> see https://aka.ms/autorest - -## Swagger Source(s) -```yaml -title: SearchServiceClient -input-file: - - https://github.com/Azure/azure-rest-api-specs/blob/14531a7cf6101c1dd57e7c1c83103a047bb8f5bb/specification/search/data-plane/Azure.Search/preview/2024-11-01-preview/searchindex.json - - https://github.com/Azure/azure-rest-api-specs/blob/14531a7cf6101c1dd57e7c1c83103a047bb8f5bb/specification/search/data-plane/Azure.Search/preview/2024-11-01-preview/searchservice.json -generation1-convenience-client: true -deserialize-null-collection-as-null-value: true -``` - -## Release hacks -We only want certain client methods for our search query client. -``` yaml -directive: -- remove-operation: Documents_AutocompleteGet -- remove-operation: Documents_SearchGet -- remove-operation: Documents_SuggestGet -``` - -### Suppress Abstract Base Class - -``` yaml -suppress-abstract-base-class: -- CharFilter -- CognitiveServicesAccount -- DataChangeDetectionPolicy -- DataDeletionDetectionPolicy -- LexicalAnalyzer -- LexicalNormalizer -- LexicalTokenizer -- ScoringFunction -- SearchIndexerDataIdentity -- SearchIndexerSkill -- Similarity -- TokenFilter -``` - - -## CodeGen hacks -These should eventually be fixed in the code generator. - -## Swagger hacks -These should eventually be fixed in the swagger files. -``` yaml -directive: - from: swagger-document - where: $.definitions.LexicalNormalizer - transform: > - $["discriminator"] = "@odata.type"; -``` - -## Renaming models after the AI Studio rebrand to AI Foundry -These should eventually be fixed in the swagger files. -```yaml -directive: -- from: "searchservice.json" - where: $.definitions.AIStudioModelCatalogName - transform: $["x-ms-enum"].name = "AIFoundryModelCatalogName"; -``` - -### Mark definitions as objects -The modeler warns about models without an explicit type. -``` yaml -directive: -- from: swagger-document - where: $.definitions.* - transform: > - if (typeof $.type === "undefined") { - $.type = "object"; - } -``` - -### Make Lookup Document behave a little friendlier -It's currently an empty object and adding Additional Properties will generate -a more useful model. 
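Stepping back to the `WordDelimiterTokenFilter` regenerated above: a minimal sketch, assuming the existing public `Azure.Search.Documents.Indexes` surface, of how such a filter can be wired into a custom analyzer on an index. The endpoint, key, index, field, analyzer, and filter names are illustrative placeholders, not taken from this PR.

```csharp
// Sketch only: attaches a WordDelimiterTokenFilter to a custom analyzer and a
// searchable field. All names below are placeholders; the types are assumed to
// come from Azure.Search.Documents.Indexes.Models as in the released SDK.
using System;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

var indexClient = new SearchIndexClient(
    new Uri("https://<service-name>.search.windows.net"),
    new AzureKeyCredential("<admin-key>"));

var index = new SearchIndex("products", new SearchField[]
{
    new SimpleField("id", SearchFieldDataType.String) { IsKey = true },
    new SearchableField("description") { AnalyzerName = "delimiting_analyzer" },
})
{
    TokenFilters =
    {
        // Split "AzureSearch" into "Azure" and "Search", but keep the original token too.
        new WordDelimiterTokenFilter("delimit_words")
        {
            SplitOnCaseChange = true,
            PreserveOriginal = true,
        }
    },
    Analyzers =
    {
        new CustomAnalyzer("delimiting_analyzer", LexicalTokenizerName.Whitespace)
        {
            TokenFilters = { "delimit_words" }
        }
    }
};

indexClient.CreateOrUpdateIndex(index);
```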
-``` yaml -directive: -- from: swagger-document - where: $.paths["/docs('{key}')"].get.responses["200"].schema - transform: > - $.additionalProperties = true; -``` - -### Fix `SearchDocumentsResult["@search.debugInfo"]` -> `SearchDocumentsResult["@search.debug"]` -``` yaml -directive: - - from: searchindex.json - where: $.definitions.SearchDocumentsResult.properties - transform: > - $["@search.debug"] = $["@search.debugInfo"]; - delete $["@search.debugInfo"]; -``` - -### Fix `SearchResult["@search.documentDebugInfo"]` -``` yaml -directive: - - from: searchindex.json - where: $.definitions.SearchResult.properties - transform: > - $["@search.documentDebugInfo"]["$ref"] = $["@search.documentDebugInfo"].items["$ref"]; - delete $["@search.documentDebugInfo"].type; - delete $["@search.documentDebugInfo"].items; -``` - -### Archboard feedback for 11.6.0 - -```yaml -directive: -- from: "searchservice.json" - where: $.definitions - transform: > - $.AzureOpenAIParameters["x-ms-client-name"] = "AzureOpenAIVectorizerParameters"; - $.AzureOpenAIParameters.properties.authIdentity["x-ms-client-name"] = "AuthenticationIdentity"; - $.AzureOpenAIParameters.properties.resourceUri["x-ms-client-name"] = "resourceUri"; - - $.VectorSearchVectorizer.properties.name["x-ms-client-name"] = "VectorizerName"; - $.AzureOpenAIVectorizer.properties.azureOpenAIParameters["x-ms-client-name"] = "Parameters"; - - $.ScalarQuantizationVectorSearchCompressionConfiguration["x-ms-client-name"] = "ScalarQuantizationCompression"; - $.BinaryQuantizationVectorSearchCompressionConfiguration["x-ms-client-name"] = "BinaryQuantizationCompression"; - $.VectorSearchCompressionConfiguration["x-ms-client-name"] = "VectorSearchCompression"; - $.VectorSearchCompressionConfiguration.properties.name["x-ms-client-name"] = "CompressionName"; - $.VectorSearchProfile.properties.compression["x-ms-client-name"] = "CompressionName"; - - $.OcrSkillLineEnding["x-ms-client-name"] = "OcrLineEnding"; - $.OcrSkillLineEnding["x-ms-enum"].name = "OcrLineEnding"; - - $.SearchIndexerDataUserAssignedIdentity.properties.userAssignedIdentity["x-ms-format"] = "arm-id"; - $.SearchIndexerIndexProjections["x-ms-client-name"] = "SearchIndexerIndexProjection"; - $.SearchIndexerSkillset.properties.indexProjections["x-ms-client-name"] = "indexProjection"; - - $.VectorSearchCompressionTargetDataType["x-ms-client-name"] = "VectorSearchCompressionTarget"; - $.VectorSearchCompressionTargetDataType["x-ms-enum"].name = "VectorSearchCompressionTarget"; - - $.WebApiVectorizer.properties.customWebApiParameters["x-ms-client-name"] = "Parameters"; - $.WebApiParameters["x-ms-client-name"] = "WebApiVectorizerParameters"; - $.WebApiParameters.properties.uri["x-ms-client-name"] = "uri"; -``` - -### Change VectorizableImageUrlQuery.Url type to Uri - -```yaml -directive: - from: swagger-document - where: $.definitions.VectorizableImageUrlQuery.properties.url - transform: $.format = "url" -``` - -### Set `hybridSearch` property to be type `HybridSearch` in SearchRequest - -``` yaml -directive: - - from: searchindex.json - where: $.definitions.SearchRequest.properties - transform: > - delete $.hybridSearch["type"]; - delete $.hybridSearch.items; - $.hybridSearch["$ref"] = "#/definitions/HybridSearch"; -``` - -### Enable `RawVectorQuery.vector` as embedding field - -```yaml -directive: -- from: searchindex.json - where: $.definitions.RawVectorQuery.properties.vector - transform: $["x-ms-embedding-vector"] = true; -``` - -### Make `VectorSearchAlgorithmKind` internal - -```yaml -directive: -- 
from: searchservice.json - where: $.definitions.VectorSearchAlgorithmKind - transform: $["x-accessibility"] = "internal" -``` - -### Make `VectorSearchCompressionKind` internal - -```yaml -directive: -- from: searchservice.json - where: $.definitions.VectorSearchCompressionKind - transform: $["x-accessibility"] = "internal" -``` - -### Make `VectorSearchCompressionKind` internal - -```yaml -directive: -- from: searchservice.json - where: $.definitions.VectorSearchCompressionKind - transform: $["x-accessibility"] = "internal" -``` - -### Make `VectorQueryKind` internal - -```yaml -directive: -- from: searchindex.json - where: $.definitions.VectorQueryKind - transform: $["x-accessibility"] = "internal" -``` - -### Make `VectorSearchVectorizerKind` internal - -```yaml -directive: -- from: searchservice.json - where: $.definitions.VectorSearchVectorizerKind - transform: $["x-accessibility"] = "internal" -``` - -### Make `VectorThresholdKind` internal - -```yaml -directive: -- from: searchindex.json - where: $.definitions.VectorThresholdKind - transform: $["x-accessibility"] = "internal" -``` - -### Rename `RawVectorQuery` to `VectorizedQuery` - -```yaml -directive: -- from: searchindex.json - where: $.definitions.RawVectorQuery - transform: $["x-ms-client-name"] = "VectorizedQuery"; -``` - -### Rename `AMLVectorizer` to `AzureMachineLearningVectorizer` - -```yaml -directive: -- from: searchservice.json - where: $.definitions.AMLVectorizer - transform: $["x-ms-client-name"] = "AzureMachineLearningVectorizer"; -``` - -### Rename `AMLParameters` to `AzureMachineLearningParameters` - -```yaml -directive: -- from: searchservice.json - where: $.definitions.AMLParameters - transform: $["x-ms-client-name"] = "AzureMachineLearningParameters"; -``` - -### Rename `ServiceLimits.maxStoragePerIndex` to `ServiceLimits.maxStoragePerIndexInBytes` - -```yaml -directive: -- from: searchservice.json - where: $.definitions.ServiceLimits - transform: $.properties.maxStoragePerIndex["x-ms-client-name"] = "maxStoragePerIndexInBytes"; -``` - -### Rename `PIIDetectionSkill.minimumPrecision` to `PIIDetectionSkill.MinPrecision` - -```yaml -directive: - - from: searchservice.json - where: $.definitions.PIIDetectionSkill - transform: $.properties.minimumPrecision["x-ms-client-name"] = "MinPrecision"; -``` - -### Rename `VectorQuery` property `K` - - Rename `VectorQuery` property `K` to `KNearestNeighborsCount` - -```yaml -directive: -- from: searchindex.json - where: $.definitions.VectorQuery.properties.k - transform: $["x-ms-client-name"] = "KNearestNeighborsCount"; -``` - -### Rename one of SearchMode definitions - -SearchMode is duplicated across swaggers. Rename one of them, even though it will be internalized. -This prevents the serializer from attempting to use undefined values until [Azure/autorest.csharp#583](https://github.com/Azure/autorest.csharp/issues/583) is fixed. 
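To make the effect of the vector-query renames above concrete (`RawVectorQuery` to `VectorizedQuery`, `k` to `KNearestNeighborsCount`), a minimal sketch of a vector search issued through the released `Azure.Search.Documents` query API. The endpoint, key, vector field name, and embedding values are placeholders.

```csharp
// Sketch only: uses the renamed VectorizedQuery / KNearestNeighborsCount surface
// produced by the directives above. Names and embedding values are dummies.
using System;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;

var searchClient = new SearchClient(
    new Uri("https://<service-name>.search.windows.net"),
    "preview-test",
    new AzureKeyCredential("<query-key>"));

ReadOnlyMemory<float> embedding = new float[] { 0.1f, 0.2f, 0.3f };

var options = new SearchOptions
{
    VectorSearch = new VectorSearchOptions
    {
        Queries =
        {
            // Formerly RawVectorQuery with property "k".
            new VectorizedQuery(embedding)
            {
                KNearestNeighborsCount = 3,
                Fields = { "descriptionVector" },
            }
        }
    }
};

SearchResults<SearchDocument> results = searchClient.Search<SearchDocument>("*", options);
foreach (SearchResult<SearchDocument> result in results.GetResults())
{
    Console.WriteLine(result.Document["id"]);
}
```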
- -```yaml -directive: -- from: searchservice.json - where: $.definitions.Suggester.properties.searchMode - transform: $["x-ms-enum"].name = "SuggesterMode"; -``` - -### Add nullable annotations - -``` yaml -directive: - from: swagger-document - where: $.definitions.SynonymMap - transform: > - $.properties.encryptionKey["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.SearchField - transform: > - $.properties.indexAnalyzer["x-nullable"] = true; - $.properties.searchAnalyzer["x-nullable"] = true; - $.properties.analyzer["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.ScoringProfile - transform: > - $.properties.text["x-nullable"] = true; - $.properties.functionAggregation["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.SearchIndex - transform: > - $.properties.encryptionKey["x-nullable"] = true; - $.properties.corsOptions["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.BM25Similarity - transform: > - $.properties.k1["x-nullable"] = true; - $.properties.b["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.SearchIndexerDataSource - transform: > - $.properties.dataChangeDetectionPolicy["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.SearchIndexerDataSource - transform: > - $.properties.dataDeletionDetectionPolicy["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.SearchIndexer - transform: > - $.properties.disabled["x-nullable"] = true; - $.properties.schedule["x-nullable"] = true; - $.properties.parameters["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.SearchIndexerStatus - transform: > - $.properties.lastResult["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.TextTranslationSkill - transform: > - $.properties.suggestedFrom["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.IndexingParameters - transform: > - $.properties.batchSize["x-nullable"] = true; - $.properties.maxFailedItems["x-nullable"] = true; - $.properties.maxFailedItemsPerBatch["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.FieldMapping - transform: > - $.properties.mappingFunction["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.IndexerExecutionResult - transform: > - $.properties.endTime["x-nullable"] = true; - $.properties.statusDetail["x-nullable"] = true; -``` - -``` yaml -directive: - from: swagger-document - where: $.definitions.CorsOptions - transform: > - $.properties.maxAgeInSeconds["x-nullable"] = true; -``` - -#### Skills - -``` yaml -directive: -- from: swagger-document - where: $.definitions.EntityRecognitionSkill - transform: > - $.properties.defaultLanguageCode["x-nullable"] = true; - -- from: swagger-document - where: $.definitions.ImageAnalysisSkill - transform: > - $.properties.defaultLanguageCode["x-nullable"] = true; - -- from: swagger-document - where: $.definitions.KeyPhraseExtractionSkill - transform: > - $.properties.defaultLanguageCode["x-nullable"] = true; - -- from: swagger-document - where: $.definitions.OcrSkill - transform: > - $.properties.defaultLanguageCode["x-nullable"] = true; - 
$.properties.detectOrientation["x-nullable"] = true; - -- from: swagger-document - where: $.definitions.SentimentSkill - transform: > - $.properties.defaultLanguageCode["x-nullable"] = true; - -- from: swagger-document - where: $.definitions.SplitSkill - transform: > - $.properties.defaultLanguageCode["x-nullable"] = true; - -- from: swagger-document - where: $.definitions.TextTranslationSkill - transform: > - $.properties.defaultFromLanguageCode["x-nullable"] = true; - -- from: swagger-document - where: $.definitions.WebApiSkill - transform: > - $.properties.httpHeaders["x-nullable"] = true; - $.properties.timeout["x-nullable"] = true; -``` - -## C# Customizations -Shape the swagger APIs to produce the best C# API possible. We can consider -fixing these in the swagger files if they would benefit other languages. - -### Property name changes -Change the name of some properties so they are properly CamelCased. -``` yaml -modelerfour: - naming: - override: - "@odata.type": ODataType -``` - -### Disable parameter grouping - -AutoRest C# supports parameter grouping now, temporary disabling to reduce the change size. - -``` yaml -modelerfour: - group-parameters: false -``` - -### Set odata.metadata Accept header in operations - -searchindex.json needs odata.metadata=none and searchservice.json needs odata.metadata=minimal in the Accept header. - -```yaml -directive: -- from: swagger-document - where: $.paths - transform: > - for (var path in $) { - for (var opName in $[path]) { - var accept = "application/json; odata.metadata="; - accept += path.startsWith("/docs") ? "none" : "minimal"; - - var op = $[path][opName]; - op.parameters.push({ - name: "Accept", - "in": "header", - required: true, - type: "string", - enum: [ accept ], - "x-ms-enum": { "modelAsString": false }, - "x-ms-parameter-location": "method" - }); - } - } - - return $; -``` - -### Move service models to Azure.Search.Documents.Indexes.Models - -Models in searchservice.json should be moved to Azure.Search.Documents.Indexes.Models. - -```yaml -directive: - from: searchservice.json - where: $.definitions.* - transform: > - $["x-namespace"] = "Azure.Search.Documents.Indexes.Models" -``` - -### Relocate x-ms-client-request-id parameter - -Remove the `x-ms-client-request-id` parameter from all methods and put it on the client. -This will be later removed when https://github.com/Azure/autorest.csharp/issues/782 is resolved. -Several attempts at just removing the parameter have caused downstream issues, so relocating it for now. - -```yaml -directive: - from: swagger-document - where: $.parameters.ClientRequestIdParameter - transform: $["x-ms-parameter-location"] = "client"; -``` - -## Seal single value enums - -Prevents the creation of single-value extensible enum in generated code. The following single-value enum will be generated as string constant. - -```yaml -directive: - from: swagger-document - where: $.parameters.PreferHeaderParameter - transform: > - $["x-ms-enum"] = { - "modelAsString": false - } -``` diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Aliases.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Aliases.cs new file mode 100644 index 000000000000..1ad9b83b1ca4 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Aliases.cs @@ -0,0 +1,312 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
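For reviewers of the regenerated serialization partials earlier in this diff: a minimal sketch of the `ModelReaderWriter` round trip that the `IJsonModel`/`IPersistableModel` implementations enable. The `Azure.Search.Documents` namespace for `WordDelimiterTokenFilter` follows the generated code in this PR and is an assumption that further customization may change.

```csharp
// Sketch only: round-trips a regenerated model through System.ClientModel's
// ModelReaderWriter. Namespace and accessibility are assumed from the diff above.
using System;
using System.ClientModel.Primitives;
using Azure.Search.Documents;

var filter = new WordDelimiterTokenFilter("delimit_words")
{
    CatenateWords = true,
    PreserveOriginal = true,
};
filter.ProtectedWords.Add("iPhone");

// Serialize the model to JSON, then read it back into a model instance.
BinaryData json = ModelReaderWriter.Write(filter);
WordDelimiterTokenFilter roundTripped = ModelReaderWriter.Read<WordDelimiterTokenFilter>(json);

Console.WriteLine(json.ToString());
Console.WriteLine(roundTripped.ProtectedWords.Count);
```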
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_Aliases + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_Create_SearchServiceCreateAlias() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempalias", + ["indexes"] = new object[] + { +"preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = client.Create(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_Create_SearchServiceCreateAlias_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempalias", + ["indexes"] = new object[] + { +"preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = await client.CreateAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_Create_SearchServiceCreateAlias_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + SearchAlias @alias = new SearchAlias("tempalias", new string[] { "preview-test" }) + { + ETag = "0x1234568AE7E58A1", + }; + Response response = client.Create(@alias); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_Create_SearchServiceCreateAlias_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + SearchAlias @alias = new SearchAlias("tempalias", new string[] { "preview-test" }) + { + ETag = "0x1234568AE7E58A1", + }; + Response response = await client.CreateAsync(@alias); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_CreateOrUpdate_SearchServiceCreateOrUpdateAlias() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "myalias", + ["indexes"] = new object[] + { +"preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = client.CreateOrUpdate("myalias", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_CreateOrUpdate_SearchServiceCreateOrUpdateAlias_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "myalias", + ["indexes"] = new object[] + { +"preview-test" + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = await client.CreateOrUpdateAsync("myalias", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_CreateOrUpdate_SearchServiceCreateOrUpdateAlias_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + SearchAlias @alias = new SearchAlias("myalias", new string[] { "preview-test" }) + { + ETag = "0x1234568AE7E58A1", + }; + Response response = client.CreateOrUpdate("myalias", @alias); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_CreateOrUpdate_SearchServiceCreateOrUpdateAlias_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + SearchAlias @alias = new SearchAlias("myalias", new string[] { "preview-test" }) + { + ETag = "0x1234568AE7E58A1", + }; + Response response = await client.CreateOrUpdateAsync("myalias", @alias); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_Delete_SearchServiceDeleteAlias() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + Response response = client.Delete("tempalias"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_Delete_SearchServiceDeleteAlias_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + Response response = await client.DeleteAsync("tempalias"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_GetAlias_SearchServiceGetAlias() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + Response response = client.GetAlias("myalias", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_GetAlias_SearchServiceGetAlias_Async() + { + 
Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + Response response = await client.GetAliasAsync("myalias", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_GetAlias_SearchServiceGetAlias_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + Response response = client.GetAlias("myalias"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_GetAlias_SearchServiceGetAlias_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + Response response = await client.GetAliasAsync("myalias"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_GetAliases_SearchServiceListAliases() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + foreach (BinaryData item in client.GetAliases(null)) + { + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_GetAliases_SearchServiceListAliases_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + await foreach (BinaryData item in client.GetAliasesAsync(null)) + { + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("indexes")[0].ToString()); + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Aliases_GetAliases_SearchServiceListAliases_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + foreach (SearchAlias item in client.GetAliases()) + { + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Aliases_GetAliases_SearchServiceListAliases_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Aliases client = new SearchClient(endpoint, credential).GetAliasesClient(); + + await foreach (SearchAlias item in client.GetAliasesAsync()) + { + } + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_DataSources.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_DataSources.cs new file mode 100644 index 000000000000..d27316c85590 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_DataSources.cs @@ -0,0 +1,451 @@ +// Copyright (c) 
Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using Azure.Search.Documents.Indexes.Models; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_DataSources + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_CreateOrUpdate_SearchServiceCreateOrUpdateDataSource() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = client.CreateOrUpdate("tempdatasource", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("credentials").ToString()); + Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_CreateOrUpdate_SearchServiceCreateOrUpdateDataSource_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = await client.CreateOrUpdateAsync("tempdatasource", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("credentials").ToString()); + 
Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_CreateOrUpdate_SearchServiceCreateOrUpdateDataSource_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials + { + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, new SearchIndexerDataContainer("doc-extraction-skillset") + { + Query = "E2E_Dsat", + }) + { + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = client.CreateOrUpdate("tempdatasource", dataSource); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_CreateOrUpdate_SearchServiceCreateOrUpdateDataSource_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials + { + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, new SearchIndexerDataContainer("doc-extraction-skillset") + { + Query = "E2E_Dsat", + }) + { + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = await client.CreateOrUpdateAsync("tempdatasource", dataSource); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_Delete_SearchServiceDeleteDataSource() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = client.Delete("tempdatasource"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_Delete_SearchServiceDeleteDataSource_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = await client.DeleteAsync("tempdatasource"); + + 
Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_GetDataSource_SearchServiceGetDataSource() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = client.GetDataSource("mydocdbdatasource", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("credentials").ToString()); + Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_GetDataSource_SearchServiceGetDataSource_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = await client.GetDataSourceAsync("mydocdbdatasource", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("credentials").ToString()); + Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_GetDataSource_SearchServiceGetDataSource_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = client.GetDataSource("mydocdbdatasource"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_GetDataSource_SearchServiceGetDataSource_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = await client.GetDataSourceAsync("mydocdbdatasource"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_GetDataSources_SearchServiceListDataSources() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = client.GetDataSources(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("credentials").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_GetDataSources_SearchServiceListDataSources_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, 
credential).GetDataSourcesClient(); + + Response response = await client.GetDataSourcesAsync(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("credentials").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_GetDataSources_SearchServiceListDataSources_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = client.GetDataSources(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_GetDataSources_SearchServiceListDataSources_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Response response = await client.GetDataSourcesAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_Create_SearchServiceCreateDataSource() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = client.Create(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("credentials").ToString()); + Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_Create_SearchServiceCreateDataSource_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempdatasource", + ["description"] = "My Azure Blob data source.", + ["type"] = "azureblob", + ["credentials"] = new + { + connectionString = 
"DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, + ["container"] = new + { + name = "doc-extraction-skillset", + query = "E2E_Dsat", + }, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = await client.CreateAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("type").ToString()); + Console.WriteLine(result.GetProperty("credentials").ToString()); + Console.WriteLine(result.GetProperty("container").GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DataSources_Create_SearchServiceCreateDataSource_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials + { + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, new SearchIndexerDataContainer("doc-extraction-skillset") + { + Query = "E2E_Dsat", + }) + { + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = client.Create(dataSource); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DataSources_Create_SearchServiceCreateDataSource_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + DataSources client = new SearchClient(endpoint, credential).GetDataSourcesClient(); + + Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection dataSource = new Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection("tempdatasource", SearchIndexerDataSourceType.AzureBlob, new DataSourceCredentials + { + ConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + }, new SearchIndexerDataContainer("doc-extraction-skillset") + { + Query = "E2E_Dsat", + }) + { + Description = "My Azure Blob data source.", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = await 
client.CreateAsync(dataSource); + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Documents.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Documents.cs new file mode 100644 index 000000000000..4214ef7efa89 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Documents.cs @@ -0,0 +1,1627 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using Azure.Search.Documents.Models; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_Documents + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_Count_SearchIndexCountDocuments() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.Count("preview-test", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_Count_SearchIndexCountDocuments_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.CountAsync("preview-test", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_Count_SearchIndexCountDocuments_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.Count("preview-test"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_Count_SearchIndexCountDocuments_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.CountAsync("preview-test"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SearchGet_SearchIndexSearchDocumentsGet() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.SearchGet("myindex", "nice hotels", true, new string[] { "category,count:10,sort:count" }, "rating gt 10", new string[] { "title" }, "", "", 80, new string[] { "search.score() desc", "rating desc" }, "simple", null, "sp", new string[] { "title", "description" }, "any", "global", "mysessionid", new string[] { "docId", "title", "description" }, 100, 10, null, null, null, null, null, null, null, null, null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchGet_SearchIndexSearchDocumentsGet_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.SearchGetAsync("myindex", "nice hotels", true, new string[] { "category,count:10,sort:count" }, "rating gt 10", new string[] { "title" }, "", "", 80, new string[] { "search.score() desc", "rating desc" }, "simple", null, "sp", new string[] { "title", "description" }, "any", "global", "mysessionid", new string[] { "docId", "title", "description" }, 100, 10, null, null, null, null, null, null, null, null, null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SearchGet_SearchIndexSearchDocumentsGet_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.SearchGet("myindex"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchGet_SearchIndexSearchDocumentsGet_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.SearchGetAsync("myindex"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SearchGet_SearchIndexSearchDocumentsSemanticGet() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.SearchGet("myindex", "how do clouds form", true, null, null, null, "", "", null, null, "semantic", null, null, null, null, null, null, null, null, null, "my-semantic-config", "partial", 780, null, null, null, null, null, null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchGet_SearchIndexSearchDocumentsSemanticGet_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.SearchGetAsync("myindex", "how do clouds form", true, null, null, null, "", "", null, null, "semantic", null, null, null, null, null, null, null, null, null, "my-semantic-config", "partial", 780, null, null, null, null, null, null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_Documents_SearchGet_SearchIndexSearchDocumentsSemanticGet_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.SearchGet("myindex"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchGet_SearchIndexSearchDocumentsSemanticGet_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.SearchGetAsync("myindex"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SearchPost_SearchIndexSearchDocumentsPost() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + count = true, + facets = new object[] + { +"ownerId", +"price,metric:sum,default:10" + }, + filter = "category eq 'purple' or category eq 'pink'", + highlight = "category", + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 100, + queryType = "semantic", + scoringStatistics = "global", + sessionId = "mysessionid", + scoringParameters = new object[] + { +"categoryTag:desiredCategoryValue" + }, + scoringProfile = "stringFieldBoost", + search = "purple", + searchFields = "id,name,description,category,ownerId", + searchMode = "any", + queryLanguage = "en-us", + speller = "lexicon", + select = "id,name,description,category,ownerId", + skip = 0, + top = 10, + semanticConfiguration = "testconfig", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 5000, + semanticQuery = "find all purple", + answers = "extractive", + captions = "extractive", + queryRewrites = "generative", + vectorQueries = new object[] + { +new +{ +vector = new object[] +{ +0F, +1F, +2F, +3F, +4F, +5F, +6F, +7F, +8F, +9F +}, +kind = "vector", +k = 50, +fields = "vector22, vector1b", +oversampling = 20, +weight = 1F, +threshold = new +{ +value = 0.984, +kind = "vectorSimilarity", +}, +filterOverride = "ownerId eq 'sam'", +} + }, + vectorFilterMode = "preFilter", + hybridSearch = new + { + maxTextRecallSize = 100, + countAndFacetMode = "countAllResults", + }, + }); + Response response = client.SearchPost("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchPost_SearchIndexSearchDocumentsPost_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + count = true, + facets = new object[] + { +"ownerId", +"price,metric:sum,default:10" + }, + filter = "category eq 'purple' or category eq 'pink'", + highlight = "category", + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 100, + queryType = "semantic", + scoringStatistics = "global", + sessionId = "mysessionid", + scoringParameters = new object[] + { +"categoryTag:desiredCategoryValue" + 
}, + scoringProfile = "stringFieldBoost", + search = "purple", + searchFields = "id,name,description,category,ownerId", + searchMode = "any", + queryLanguage = "en-us", + speller = "lexicon", + select = "id,name,description,category,ownerId", + skip = 0, + top = 10, + semanticConfiguration = "testconfig", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 5000, + semanticQuery = "find all purple", + answers = "extractive", + captions = "extractive", + queryRewrites = "generative", + vectorQueries = new object[] + { +new +{ +vector = new object[] +{ +0F, +1F, +2F, +3F, +4F, +5F, +6F, +7F, +8F, +9F +}, +kind = "vector", +k = 50, +fields = "vector22, vector1b", +oversampling = 20, +weight = 1F, +threshold = new +{ +value = 0.984, +kind = "vectorSimilarity", +}, +filterOverride = "ownerId eq 'sam'", +} + }, + vectorFilterMode = "preFilter", + hybridSearch = new + { + maxTextRecallSize = 100, + countAndFacetMode = "countAllResults", + }, + }); + Response response = await client.SearchPostAsync("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SearchPost_SearchIndexSearchDocumentsPost_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + SearchOptions searchOptions = null; + Response response = client.SearchPost("preview-test", searchOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchPost_SearchIndexSearchDocumentsPost_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + SearchOptions searchOptions = null; + Response response = await client.SearchPostAsync("preview-test", searchOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SearchPost_SearchIndexSearchDocumentsSemanticPost() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + count = true, + highlightPostTag = "", + highlightPreTag = "", + queryType = "semantic", + search = "how do clouds form", + semanticConfiguration = "my-semantic-config", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 780, + }); + Response response = client.SearchPost("myindex", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchPost_SearchIndexSearchDocumentsSemanticPost_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + count = true, + highlightPostTag = "", + highlightPreTag = "", + queryType = "semantic", + search = "how do clouds 
form", + semanticConfiguration = "my-semantic-config", + semanticErrorHandling = "partial", + semanticMaxWaitInMilliseconds = 780, + }); + Response response = await client.SearchPostAsync("myindex", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.score").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SearchPost_SearchIndexSearchDocumentsSemanticPost_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + SearchOptions searchOptions = null; + Response response = client.SearchPost("myindex", searchOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SearchPost_SearchIndexSearchDocumentsSemanticPost_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + SearchOptions searchOptions = null; + Response response = await client.SearchPostAsync("myindex", searchOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_GetDocument_SearchIndexGetDocument() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.GetDocument("preview-test", "1", new string[] { "id", "description", "name", "category", "ownerId" }, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_GetDocument_SearchIndexGetDocument_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.GetDocumentAsync("preview-test", "1", new string[] { "id", "description", "name", "category", "ownerId" }, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_GetDocument_SearchIndexGetDocument_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.GetDocument("preview-test", "1"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_GetDocument_SearchIndexGetDocument_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.GetDocumentAsync("preview-test", "1"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SuggestGet_SearchIndexSuggestDocumentsGet() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = 
new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.SuggestGet("myindex", "hote", "sg", "rating gt 10", false, "", "", 80, new string[] { "search.score() desc", "rating desc" }, new string[] { "title" }, new string[] { "docId", "title", "description" }, 10, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SuggestGet_SearchIndexSuggestDocumentsGet_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.SuggestGetAsync("myindex", "hote", "sg", "rating gt 10", false, "", "", 80, new string[] { "search.score() desc", "rating desc" }, new string[] { "title" }, new string[] { "docId", "title", "description" }, 10, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SuggestGet_SearchIndexSuggestDocumentsGet_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.SuggestGet("myindex", "hote", "sg"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SuggestGet_SearchIndexSuggestDocumentsGet_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.SuggestGetAsync("myindex", "hote", "sg"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SuggestPost_SearchIndexSuggestDocumentsPost() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + filter = "ownerId eq 'sam' and id lt '15'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + orderby = "id desc", + search = "p", + searchFields = "category", + select = "id,name,category,ownerId", + suggesterName = "sg", + top = 10, + }); + Response response = client.SuggestPost("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SuggestPost_SearchIndexSuggestDocumentsPost_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + filter = "ownerId eq 'sam' and id lt '15'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + orderby = "id desc", + search = 
"p", + searchFields = "category", + select = "id,name,category,ownerId", + suggesterName = "sg", + top = 10, + }); + Response response = await client.SuggestPostAsync("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("@search.text").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_SuggestPost_SearchIndexSuggestDocumentsPost_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + SuggestOptions suggestOptions = null; + Response response = client.SuggestPost("preview-test", suggestOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_SuggestPost_SearchIndexSuggestDocumentsPost_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + SuggestOptions suggestOptions = null; + Response response = await client.SuggestPostAsync("preview-test", suggestOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_Index_SearchIndexIndexDocuments() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + value = new object[] + { +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ 
+["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +} + }, + }); + Response response = client.Index("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("key").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("statusCode").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_Index_SearchIndexIndexDocuments_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + value = new object[] + { +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, 
+new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +}, +new Dictionary +{ +["@search.action"] = "mergeOrUpload" +} + }, + }); + Response response = await client.IndexAsync("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("key").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("statusCode").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_Index_SearchIndexIndexDocuments_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + IndexBatch batch = new IndexBatch(new IndexAction[] + { +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = 
IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +} + }); + Response response = client.Index("preview-test", batch); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_Index_SearchIndexIndexDocuments_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + IndexBatch batch = new IndexBatch(new IndexAction[] + { +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ 
+ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +}, +new IndexAction +{ +ActionType = IndexActionType.MergeOrUpload, +} + }); + Response response = await client.IndexAsync("preview-test", batch); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_AutocompleteGet_SearchIndexAutocompleteDocumentsGet() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.AutocompleteGet("myindex", "washington medic", "sg", "oneTerm", null, false, "", "", 80, new string[] { "title", "description" }, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_Documents_AutocompleteGet_SearchIndexAutocompleteDocumentsGet_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.AutocompleteGetAsync("myindex", "washington medic", "sg", "oneTerm", null, false, "", "", 80, new string[] { "title", "description" }, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_AutocompleteGet_SearchIndexAutocompleteDocumentsGet_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = client.AutocompleteGet("myindex", "washington medic", "sg"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_AutocompleteGet_SearchIndexAutocompleteDocumentsGet_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + Response response = await client.AutocompleteGetAsync("myindex", "washington medic", "sg"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_AutocompletePost_SearchIndexAutocompleteDocumentsPost() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + search = "p", + autocompleteMode = "oneTerm", + filter = "ownerId ne '1'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + searchFields = "category, ownerId", + suggesterName = "sg", + top = 10, + }); + Response response = client.AutocompletePost("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_AutocompletePost_SearchIndexAutocompleteDocumentsPost_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + using RequestContent content = RequestContent.Create(new + { + search = "p", + autocompleteMode = "oneTerm", + filter = "ownerId ne '1'", + fuzzy = true, + highlightPostTag = "", + highlightPreTag = "", + minimumCoverage = 80, + searchFields = "category, ownerId", + suggesterName = "sg", + top = 10, + }); + Response response = await client.AutocompletePostAsync("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("text").ToString()); + 
Console.WriteLine(result.GetProperty("value")[0].GetProperty("queryPlusText").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Documents_AutocompletePost_SearchIndexAutocompleteDocumentsPost_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + AutocompleteOptions autocompleteOptions = null; + Response response = client.AutocompletePost("preview-test", autocompleteOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Documents_AutocompletePost_SearchIndexAutocompleteDocumentsPost_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Documents client = new SearchClient(endpoint, credential).GetDocumentsClient(); + + AutocompleteOptions autocompleteOptions = null; + Response response = await client.AutocompletePostAsync("preview-test", autocompleteOptions); + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Indexers.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Indexers.cs new file mode 100644 index 000000000000..56d66ea6c9ae --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Indexers.cs @@ -0,0 +1,990 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using System.Xml; +using Azure.Core; +using Azure.Identity; +using Azure.Search.Documents.Indexes.Models; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_Indexers + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_Reset_SearchServiceResetIndexer() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.Reset("myindexer"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_Reset_SearchServiceResetIndexer_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.ResetAsync("myindexer"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_ResetDocs_SearchServiceResetDocs() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + using RequestContent content = RequestContent.Create(new + { + documentKeys = new object[] + { +"1", +"2", +"3" + }, + }); + Response response = client.ResetDocs("myindexer", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_ResetDocs_SearchServiceResetDocs_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, 
credential).GetIndexersClient(); + + using RequestContent content = RequestContent.Create(new + { + documentKeys = new object[] + { +"1", +"2", +"3" + }, + }); + Response response = await client.ResetDocsAsync("myindexer", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_ResetDocs_SearchServiceResetDocs_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.ResetDocs("myindexer"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_ResetDocs_SearchServiceResetDocs_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.ResetDocsAsync("myindexer"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_Run_SearchServiceRunIndexer() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.Run("myindexer"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_Run_SearchServiceRunIndexer_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.RunAsync("myindexer"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_CreateOrUpdate_SearchServiceCreateOrUpdateIndexer() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + using RequestContent content = RequestContent.Create(new Dictionary<string, object> + { + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + { + interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToOne", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", + }, + }, + ["fieldMappings"] = new object[] + { +new +{ +sourceFieldName = "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["outputFieldMappings"] = new object[] + { +new +{ +sourceFieldName
= "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = client.CreateOrUpdate("myindexer", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_CreateOrUpdate_SearchServiceCreateOrUpdateIndexer_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + using RequestContent content = RequestContent.Create(new Dictionary<string, object> + { + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + { + interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToOne", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", + }, + }, + ["fieldMappings"] = new object[] + { +new +{ +sourceFieldName = "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["outputFieldMappings"] = new object[] + { +new +{ +sourceFieldName = "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = await client.CreateOrUpdateAsync("myindexer", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_Indexers_CreateOrUpdate_SearchServiceCreateOrUpdateIndexer_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") + { + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToOne, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + OutputFieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = client.CreateOrUpdate("myindexer", indexer); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_CreateOrUpdate_SearchServiceCreateOrUpdateIndexer_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") + { + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = 
true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToOne, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + OutputFieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = await client.CreateOrUpdateAsync("myindexer", indexer); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_Delete_SearchServiceDeleteIndexer() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.Delete("tempindexer"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_Delete_SearchServiceDeleteIndexer_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.DeleteAsync("tempindexer"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_GetIndexer_SearchServiceGetIndexer() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.GetIndexer("myindexer", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_GetIndexer_SearchServiceGetIndexer_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.GetIndexerAsync("myindexer", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_Indexers_GetIndexer_SearchServiceGetIndexer_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.GetIndexer("myindexer"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_GetIndexer_SearchServiceGetIndexer_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.GetIndexerAsync("myindexer"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_GetIndexers_SearchServiceListIndexers() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.GetIndexers("*", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_GetIndexers_SearchServiceListIndexers_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.GetIndexersAsync("*", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_GetIndexers_SearchServiceListIndexers_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.GetIndexers(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_GetIndexers_SearchServiceListIndexers_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.GetIndexersAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_Create_SearchServiceCreateIndexer() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + using RequestContent content = RequestContent.Create(new Dictionary<string, object> + { + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + {
+ interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToMany", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", + }, + }, + ["fieldMappings"] = new object[] + { +new +{ +sourceFieldName = "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["outputFieldMappings"] = new object[] + { +new +{ +sourceFieldName = "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["cache"] = new + { + storageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + enableReprocessing = true, + } + }); + Response response = client.Create(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_Create_SearchServiceCreateIndexer_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + using RequestContent content = RequestContent.Create(new Dictionary<string, object> + { + ["name"] = "myindexer", + ["description"] = "Description of the indexer", + ["dataSourceName"] = "mydocdbdatasource", + ["skillsetName"] = "myskillset", + ["targetIndexName"] = "preview-test", + ["schedule"] = new + { + interval = "P1D", + startTime = "2025-01-07T19:30:00Z", + }, + ["parameters"] = new + { + batchSize = 10, + maxFailedItems = 10, + maxFailedItemsPerBatch = 5, + configuration = new + { + parsingMode = "markdown", + excludedFileNameExtensions = ".png,.mp4", + indexedFileNameExtensions = ".docx,.pptx", + failOnUnsupportedContentType = true, + failOnUnprocessableDocument = false, + indexStorageMetadataOnlyForOversizedDocuments = true, + delimitedTextHeaders = "Header1,Header2", + delimitedTextDelimiter = "|", + firstLineContainsHeaders = true, + markdownParsingSubmode = "oneToMany", + markdownHeaderDepth = "h6", + documentRoot = "/root", + dataToExtract = "storageMetadata", + imageAction = "none", + allowSkillsetToReadFileData = false, + pdfTextRotationAlgorithm = "none", + executionEnvironment = "standard", 
+ }, + }, + ["fieldMappings"] = new object[] + { +new +{ +sourceFieldName = "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["outputFieldMappings"] = new object[] + { +new +{ +sourceFieldName = "/document", +targetFieldName = "name", +mappingFunction = new +{ +name = "base64Encode", +}, +} + }, + ["disabled"] = false, + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["cache"] = new + { + storageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + enableReprocessing = true, + } + }); + Response response = await client.CreateAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("dataSourceName").ToString()); + Console.WriteLine(result.GetProperty("targetIndexName").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_Create_SearchServiceCreateIndexer_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") + { + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToMany, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + OutputFieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new 
AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + Cache = new SearchIndexerCache + { + StorageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + EnableReprocessing = true, + }, + }; + Response response = client.Create(indexer); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_Create_SearchServiceCreateIndexer_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test") + { + Description = "Description of the indexer", + SkillsetName = "myskillset", + Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")) + { + StartTime = DateTimeOffset.Parse("2025-01-07T19:30:00Z"), + }, + Parameters = new IndexingParameters + { + BatchSize = 10, + MaxFailedItems = 10, + MaxFailedItemsPerBatch = 5, + Configuration = new IndexingParametersConfiguration + { + ParsingMode = BlobIndexerParsingMode.Markdown, + ExcludedFileNameExtensions = ".png,.mp4", + IndexedFileNameExtensions = ".docx,.pptx", + FailOnUnsupportedContentType = true, + FailOnUnprocessableDocument = false, + IndexStorageMetadataOnlyForOversizedDocuments = true, + DelimitedTextHeaders = "Header1,Header2", + DelimitedTextDelimiter = "|", + FirstLineContainsHeaders = true, + MarkdownParsingSubmode = MarkdownParsingSubmode.OneToMany, + MarkdownHeaderDepth = MarkdownHeaderDepth.H6, + DocumentRoot = "/root", + DataToExtract = BlobIndexerDataToExtract.StorageMetadata, + ImageAction = BlobIndexerImageAction.None, + AllowSkillsetToReadFileData = false, + PdfTextRotationAlgorithm = Search.Documents.Indexes.Models.BlobIndexerPdfTextRotationAlgorithm.None, + ExecutionEnvironment = IndexerExecutionEnvironment.Standard, + }, + }, + FieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + OutputFieldMappings = {new FieldMapping("/document") +{ +TargetFieldName = "name", +MappingFunction = new FieldMappingFunction("base64Encode"), +}}, + IsDisabled = false, + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + Cache = new SearchIndexerCache + { + StorageConnectionString = "DefaultEndpointsProtocol=https;AccountName=myAccountName;AccountKey=myAccountKey;EndpointSuffix=core.windows.net ", + EnableReprocessing = true, + }, + }; + Response response = await client.CreateAsync(indexer); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_GetStatus_SearchServiceGetIndexerStatus() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.GetStatus("myindexer", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + 
Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("errorMessage").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("statusCode").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("warnings")[0].GetProperty("message").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsProcessed").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsFailed").ToString()); + Console.WriteLine(result.GetProperty("limits").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_GetStatus_SearchServiceGetIndexerStatus_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.GetStatusAsync("myindexer", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("errorMessage").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("errors")[0].GetProperty("statusCode").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("warnings")[0].GetProperty("message").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsProcessed").ToString()); + Console.WriteLine(result.GetProperty("executionHistory")[0].GetProperty("itemsFailed").ToString()); + Console.WriteLine(result.GetProperty("limits").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexers_GetStatus_SearchServiceGetIndexerStatus_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = client.GetStatus("myindexer"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexers_GetStatus_SearchServiceGetIndexerStatus_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Indexers client = new SearchClient(endpoint, credential).GetIndexersClient(); + + Response response = await client.GetStatusAsync("myindexer"); + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Indexes.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Indexes.cs new file mode 100644 index 000000000000..efacc23ecd14 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Indexes.cs @@ -0,0 +1,2764 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
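The Samples_Indexers.cs tests above exercise Create and GetStatus in both protocol form (RequestContent shaped like the REST payload, with the result read back from response.ContentStream) and convenience form (the SearchIndexer model types), each with sync and async variants. The condensed sketch below restates that pattern in one place; it uses only members that appear in those samples (GetIndexersClient, SearchIndexer, IndexingSchedule, FieldMapping, FieldMappingFunction, GetStatus), while the endpoint, key, and using directives are assumed placeholders rather than values taken from this change.

// Minimal sketch, not part of the generated diff: mirrors the generated Indexers samples above.
// The using directives are assumptions; the generated file resolves these types from
// Azure.Search.Documents and Azure.Search.Documents.Indexes.Models.
using System;
using System.Text.Json;
using System.Xml;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Indexes.Models;

Uri endpoint = new Uri("https://myservice.search.windows.net");          // assumed placeholder
AzureKeyCredential credential = new AzureKeyCredential("admin-api-key"); // assumed placeholder
Indexers client = new SearchClient(endpoint, credential).GetIndexersClient();

// Convenience path: build the strongly typed SearchIndexer and let the client serialize it.
SearchIndexer indexer = new SearchIndexer("myindexer", "mydocdbdatasource", "preview-test")
{
    SkillsetName = "myskillset",
    Schedule = new IndexingSchedule(XmlConvert.ToTimeSpan("P1D")),
    FieldMappings =
    {
        new FieldMapping("/document")
        {
            TargetFieldName = "name",
            MappingFunction = new FieldMappingFunction("base64Encode"),
        }
    },
};
var created = client.Create(indexer);

// Protocol path: GetStatus(name, context) returns the raw JSON payload, which the samples
// read with JsonDocument; GetStatus(name) is the model-binding equivalent.
Response status = client.GetStatus("myindexer", null);
JsonElement root = JsonDocument.Parse(status.ContentStream).RootElement;
Console.WriteLine(root.GetProperty("status").ToString());

The Samples_Indexes.cs file that begins below follows the same protocol/convenience split for the index operations (Create, CreateOrUpdate, Delete, GetIndex, GetStatistics, Analyze).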
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using System.Xml; +using Azure.Core; +using Azure.Identity; +using Azure.Search.Documents.Indexes.Models; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_Indexes + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_Create_SearchServiceCreateIndex() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { +new +{ +name = "id", +type = "Edm.String", +key = true, +sortable = true, +}, +new +{ +name = "vector1", +retrievable = true, +searchable = true, +dimensions = 20, +vectorSearchProfile = "config1", +}, +new +{ +name = "vector1b", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector2", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector3", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector22", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector4", +retrievable = true, +searchable = true, +dimensions = 32, +vectorSearchProfile = "config4", +}, +new +{ +name = "name", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "description", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "standard.lucene", +}, +new +{ +name = "category", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "ownerId", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "price", +type = "Edm.Double", +retrievable = true, +filterable = true, +sortable = true, +facetable = true, +} + }, + ["scoringProfiles"] = new object[] + { +new +{ +name = "stringFieldBoost", +text = new +{ +weights = new +{ +name = 3, +description = 1, +category = 2, +ownerId = 1, +}, +}, +functions = new object[] +{ +new +{ +tag = new +{ +tagsParameter = "categoryTag", +}, +type = "tag", +fieldName = "category", +boost = 2, +} +}, +} + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { +"https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { +new +{ +name = "sg", +searchMode = "analyzingInfixMatching", +sourceFields = new object[] +{ +"category", +"ownerId" +}, +} + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { +new +{ +name = "testconfig", +prioritizedFields = new +{ +titleField = new +{ +fieldName = "category", +}, 
+prioritizedContentFields = new object[] +{ +new +{ +fieldName = "description", +} +}, +prioritizedKeywordsFields = new object[] +{ +new +{ +fieldName = "ownerId", +} +}, +}, +flightingOptIn = true, +} + }, + }, + ["vectorSearch"] = new + { + profiles = new object[] + { +new +{ +name = "config1", +algorithm = "cosine", +vectorizer = "openai", +compression = "mySQ8", +}, +new +{ +name = "config2", +algorithm = "euclidean", +vectorizer = "custom-web-api", +compression = "mySQ8", +}, +new +{ +name = "config3", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQC", +}, +new +{ +name = "config4", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQWithoutOriginals", +} + }, + algorithms = new object[] + { +new +{ +hnswParameters = new +{ +metric = "cosine", +}, +name = "cosine", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "euclidean", +}, +name = "euclidean", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "dotProduct", +}, +name = "dotProduct", +kind = "hnsw", +} + }, + vectorizers = new object[] + { +new +{ +azureOpenAIParameters = new +{ +resourceUri = "https://test-sample.openai.azure.com/", +deploymentId = "model", +apiKey = "api-key", +modelName = "text-embedding-3-large", +}, +name = "openai", +kind = "azureOpenAI", +}, +new +{ +customWebApiParameters = new +{ +uri = "https://my-custom-endpoint.org/", +httpHeaders = new +{ +header1 = "value1", +header2 = "value2", +}, +httpMethod = "POST", +timeout = "PT1M", +authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +name = "custom-web-api", +kind = "customWebApi", +}, +new +{ +amlParameters = new +{ +uri = "https://my-custom-endpoint.org/", +resourceId = "aml resource id", +timeout = "PT1M", +region = "aml region", +modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", +}, +name = "aml", +kind = "aml", +} + }, + compressions = new object[] + { +new +{ +scalarQuantizationParameters = new +{ +quantizedDataType = "int8", +}, +name = "mySQ8", +kind = "scalarQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQC", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQWithoutOriginals", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "discardOriginals", +}, +truncationDimension = 2, +} + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = client.Create(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_Create_SearchServiceCreateIndex_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { 
+new +{ +name = "id", +type = "Edm.String", +key = true, +sortable = true, +}, +new +{ +name = "vector1", +retrievable = true, +searchable = true, +dimensions = 20, +vectorSearchProfile = "config1", +}, +new +{ +name = "vector1b", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector2", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector3", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector22", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector4", +retrievable = true, +searchable = true, +dimensions = 32, +vectorSearchProfile = "config4", +}, +new +{ +name = "name", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "description", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "standard.lucene", +}, +new +{ +name = "category", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "ownerId", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "price", +type = "Edm.Double", +retrievable = true, +filterable = true, +sortable = true, +facetable = true, +} + }, + ["scoringProfiles"] = new object[] + { +new +{ +name = "stringFieldBoost", +text = new +{ +weights = new +{ +name = 3, +description = 1, +category = 2, +ownerId = 1, +}, +}, +functions = new object[] +{ +new +{ +tag = new +{ +tagsParameter = "categoryTag", +}, +type = "tag", +fieldName = "category", +boost = 2, +} +}, +} + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { +"https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { +new +{ +name = "sg", +searchMode = "analyzingInfixMatching", +sourceFields = new object[] +{ +"category", +"ownerId" +}, +} + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { +new +{ +name = "testconfig", +prioritizedFields = new +{ +titleField = new +{ +fieldName = "category", +}, +prioritizedContentFields = new object[] +{ +new +{ +fieldName = "description", +} +}, +prioritizedKeywordsFields = new object[] +{ +new +{ +fieldName = "ownerId", +} +}, +}, +flightingOptIn = true, +} + }, + }, + ["vectorSearch"] = new + { + profiles = new object[] + { +new +{ +name = "config1", +algorithm = "cosine", +vectorizer = "openai", +compression = "mySQ8", +}, +new +{ +name = "config2", +algorithm = "euclidean", +vectorizer = "custom-web-api", +compression = "mySQ8", +}, +new +{ +name = "config3", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQC", +}, +new +{ +name = "config4", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQWithoutOriginals", +} + }, + algorithms = new object[] + { +new +{ +hnswParameters = new +{ +metric = "cosine", +}, +name = 
"cosine", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "euclidean", +}, +name = "euclidean", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "dotProduct", +}, +name = "dotProduct", +kind = "hnsw", +} + }, + vectorizers = new object[] + { +new +{ +azureOpenAIParameters = new +{ +resourceUri = "https://test-sample.openai.azure.com/", +deploymentId = "model", +apiKey = "api-key", +modelName = "text-embedding-3-large", +}, +name = "openai", +kind = "azureOpenAI", +}, +new +{ +customWebApiParameters = new +{ +uri = "https://my-custom-endpoint.org/", +httpHeaders = new +{ +header1 = "value1", +header2 = "value2", +}, +httpMethod = "POST", +timeout = "PT1M", +authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +name = "custom-web-api", +kind = "customWebApi", +}, +new +{ +amlParameters = new +{ +uri = "https://my-custom-endpoint.org/", +resourceId = "aml resource id", +timeout = "PT1M", +region = "aml region", +modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", +}, +name = "aml", +kind = "aml", +} + }, + compressions = new object[] + { +new +{ +scalarQuantizationParameters = new +{ +quantizedDataType = "int8", +}, +name = "mySQ8", +kind = "scalarQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQC", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQWithoutOriginals", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "discardOriginals", +}, +truncationDimension = 2, +} + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = await client.CreateAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_Create_SearchServiceCreateIndex_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] + { +new SearchField("id", SearchFieldDataType.String) +{ +Key = true, +Sortable = true, +}, +new SearchField("vector1", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 20, +VectorSearchProfileName = "config1", +}, +new SearchField("vector1b", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new SearchField("vector2", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector3", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector22", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new 
SearchField("vector4", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 32, +VectorSearchProfileName = "config4", +}, +new SearchField("name", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("description", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.StandardLucene, +}, +new SearchField("category", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("ownerId", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("price", SearchFieldDataType.Double) +{ +Retrievable = true, +Filterable = true, +Sortable = true, +Facetable = true, +} + }) + { + ScoringProfiles = {new ScoringProfile("stringFieldBoost") +{ +TextWeights = new TextWeights(new Dictionary +{ +["name"] = 3, +["description"] = 1, +["category"] = 2, +["ownerId"] = 1 +}), +Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, +}}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields +{ +TitleField = new SemanticField("category"), +ContentFields = {new SemanticField("description")}, +KeywordsFields = {new SemanticField("ownerId")}, +}) +{ +FlightingOptIn = true, +}}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") +{ +VectorizerName = "openai", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config2", "euclidean") +{ +VectorizerName = "custom-web-api", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config3", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQC", +}, new VectorSearchProfile("config4", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQWithoutOriginals", +}}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Cosine, +}, +}, new HnswAlgorithmConfiguration("euclidean") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Euclidean, +}, +}, new HnswAlgorithmConfiguration("dotProduct") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.DotProduct, +}, +}}, + Vectorizers = {new AzureOpenAIVectorizer("openai") +{ +Parameters = new AzureOpenAIVectorizerParameters +{ +ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), +DeploymentName = "model", +ApiKey = "api-key", +ModelName = AzureOpenAIModelName.TextEmbedding3Large, +}, +}, new WebApiVectorizer("custom-web-api") +{ +WebApiParameters = new WebApiVectorizerParameters +{ +Url = new Uri("https://my-custom-endpoint.org/"), +HttpHeaders = +{ +["header1"] = 
"value1", +["header2"] = "value2" +}, +HttpMethod = "POST", +Timeout = XmlConvert.ToTimeSpan("PT1M"), +AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +}, new AzureMachineLearningVectorizer("aml") +{ +AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) +{ +ResourceId = "aml resource id", +Timeout = XmlConvert.ToTimeSpan("PT1M"), +Region = "aml region", +ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, +}, +}}, + Compressions = {new ScalarQuantizationCompression("mySQ8") +{ +Parameters = new ScalarQuantizationParameters +{ +QuantizedDataType = VectorSearchCompressionTarget.Int8, +}, +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQC") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQWithoutOriginals") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, +}, +TruncationDimension = 2, +}}, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = client.Create(index); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_Create_SearchServiceCreateIndex_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] + { +new SearchField("id", SearchFieldDataType.String) +{ +Key = true, +Sortable = true, +}, +new SearchField("vector1", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 20, +VectorSearchProfileName = "config1", +}, +new SearchField("vector1b", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new SearchField("vector2", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector3", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector22", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new SearchField("vector4", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 32, +VectorSearchProfileName = "config4", +}, +new SearchField("name", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("description", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.StandardLucene, +}, +new SearchField("category", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = 
LexicalAnalyzerName.EnLucene, +}, +new SearchField("ownerId", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("price", SearchFieldDataType.Double) +{ +Retrievable = true, +Filterable = true, +Sortable = true, +Facetable = true, +} + }) + { + ScoringProfiles = {new ScoringProfile("stringFieldBoost") +{ +TextWeights = new TextWeights(new Dictionary +{ +["name"] = 3, +["description"] = 1, +["category"] = 2, +["ownerId"] = 1 +}), +Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, +}}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields +{ +TitleField = new SemanticField("category"), +ContentFields = {new SemanticField("description")}, +KeywordsFields = {new SemanticField("ownerId")}, +}) +{ +FlightingOptIn = true, +}}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") +{ +VectorizerName = "openai", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config2", "euclidean") +{ +VectorizerName = "custom-web-api", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config3", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQC", +}, new VectorSearchProfile("config4", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQWithoutOriginals", +}}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Cosine, +}, +}, new HnswAlgorithmConfiguration("euclidean") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Euclidean, +}, +}, new HnswAlgorithmConfiguration("dotProduct") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.DotProduct, +}, +}}, + Vectorizers = {new AzureOpenAIVectorizer("openai") +{ +Parameters = new AzureOpenAIVectorizerParameters +{ +ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), +DeploymentName = "model", +ApiKey = "api-key", +ModelName = AzureOpenAIModelName.TextEmbedding3Large, +}, +}, new WebApiVectorizer("custom-web-api") +{ +WebApiParameters = new WebApiVectorizerParameters +{ +Url = new Uri("https://my-custom-endpoint.org/"), +HttpHeaders = +{ +["header1"] = "value1", +["header2"] = "value2" +}, +HttpMethod = "POST", +Timeout = XmlConvert.ToTimeSpan("PT1M"), +AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +}, new AzureMachineLearningVectorizer("aml") +{ +AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) +{ +ResourceId = "aml resource id", +Timeout = XmlConvert.ToTimeSpan("PT1M"), +Region = "aml region", +ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, +}, +}}, + Compressions = {new ScalarQuantizationCompression("mySQ8") +{ +Parameters = new ScalarQuantizationParameters +{ +QuantizedDataType = VectorSearchCompressionTarget.Int8, +}, +RescoringOptions = new RescoringOptions +{ 
+EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQC") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQWithoutOriginals") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, +}, +TruncationDimension = 2, +}}, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = await client.CreateAsync(index); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_CreateOrUpdate_SearchServiceCreateOrUpdateIndex() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { +new +{ +name = "id", +type = "Edm.String", +key = true, +sortable = true, +}, +new +{ +name = "vector1", +retrievable = true, +searchable = true, +dimensions = 20, +vectorSearchProfile = "config1", +}, +new +{ +name = "vector1b", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector2", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector3", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector22", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector4", +retrievable = true, +searchable = true, +dimensions = 32, +vectorSearchProfile = "config4", +}, +new +{ +name = "name", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "description", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "standard.lucene", +}, +new +{ +name = "category", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "ownerId", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "price", +type = "Edm.Double", +retrievable = true, +filterable = true, +sortable = true, +facetable = true, +} + }, + ["scoringProfiles"] = new object[] + { +new +{ +name = "stringFieldBoost", +text = new +{ +weights = new +{ +name = 3, +description = 1, +category = 2, +ownerId = 1, +}, +}, +functions = new object[] +{ +new +{ +tag = new +{ +tagsParameter = "categoryTag", +}, +type = "tag", +fieldName = "category", +boost = 2, +} +}, +} + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { +"https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { +new +{ +name = "sg", +searchMode = 
"analyzingInfixMatching", +sourceFields = new object[] +{ +"category", +"ownerId" +}, +} + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { +new +{ +name = "testconfig", +prioritizedFields = new +{ +titleField = new +{ +fieldName = "category", +}, +prioritizedContentFields = new object[] +{ +new +{ +fieldName = "description", +} +}, +prioritizedKeywordsFields = new object[] +{ +new +{ +fieldName = "ownerId", +} +}, +}, +flightingOptIn = true, +} + }, + }, + ["vectorSearch"] = new + { + profiles = new object[] + { +new +{ +name = "config1", +algorithm = "cosine", +vectorizer = "openai", +compression = "mySQ8", +}, +new +{ +name = "config2", +algorithm = "euclidean", +vectorizer = "custom-web-api", +compression = "mySQ8", +}, +new +{ +name = "config3", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQC", +}, +new +{ +name = "config4", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQWithoutOriginals", +} + }, + algorithms = new object[] + { +new +{ +hnswParameters = new +{ +metric = "cosine", +}, +name = "cosine", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "euclidean", +}, +name = "euclidean", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "dotProduct", +}, +name = "dotProduct", +kind = "hnsw", +} + }, + vectorizers = new object[] + { +new +{ +azureOpenAIParameters = new +{ +resourceUri = "https://test-sample.openai.azure.com/", +deploymentId = "model", +apiKey = "api-key", +modelName = "text-embedding-3-large", +}, +name = "openai", +kind = "azureOpenAI", +}, +new +{ +customWebApiParameters = new +{ +uri = "https://my-custom-endpoint.org/", +httpHeaders = new +{ +header1 = "value1", +header2 = "value2", +}, +httpMethod = "POST", +timeout = "PT1M", +authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +name = "custom-web-api", +kind = "customWebApi", +}, +new +{ +amlParameters = new +{ +uri = "https://my-custom-endpoint.org/", +resourceId = "aml resource id", +timeout = "PT1M", +region = "aml region", +modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", +}, +name = "aml", +kind = "aml", +} + }, + compressions = new object[] + { +new +{ +scalarQuantizationParameters = new +{ +quantizedDataType = "int8", +}, +name = "mySQ8", +kind = "scalarQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQC", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQWithoutOriginals", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "discardOriginals", +}, +truncationDimension = 2, +} + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = client.CreateOrUpdate("temp-preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + 
Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_CreateOrUpdate_SearchServiceCreateOrUpdateIndex_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "temp-preview-test", + ["fields"] = new object[] + { +new +{ +name = "id", +type = "Edm.String", +key = true, +sortable = true, +}, +new +{ +name = "vector1", +retrievable = true, +searchable = true, +dimensions = 20, +vectorSearchProfile = "config1", +}, +new +{ +name = "vector1b", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector2", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector3", +retrievable = true, +searchable = true, +dimensions = 5, +vectorSearchProfile = "config3", +}, +new +{ +name = "vector22", +retrievable = true, +searchable = true, +dimensions = 10, +vectorSearchProfile = "config2", +}, +new +{ +name = "vector4", +retrievable = true, +searchable = true, +dimensions = 32, +vectorSearchProfile = "config4", +}, +new +{ +name = "name", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "description", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "standard.lucene", +}, +new +{ +name = "category", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "ownerId", +type = "Edm.String", +retrievable = true, +searchable = true, +filterable = true, +sortable = true, +facetable = true, +analyzer = "en.lucene", +}, +new +{ +name = "price", +type = "Edm.Double", +retrievable = true, +filterable = true, +sortable = true, +facetable = true, +} + }, + ["scoringProfiles"] = new object[] + { +new +{ +name = "stringFieldBoost", +text = new +{ +weights = new +{ +name = 3, +description = 1, +category = 2, +ownerId = 1, +}, +}, +functions = new object[] +{ +new +{ +tag = new +{ +tagsParameter = "categoryTag", +}, +type = "tag", +fieldName = "category", +boost = 2, +} +}, +} + }, + ["defaultScoringProfile"] = "stringFieldBoost", + ["corsOptions"] = new + { + allowedOrigins = new object[] + { +"https://www.example.com/foo" + }, + maxAgeInSeconds = 10L, + }, + ["suggesters"] = new object[] + { +new +{ +name = "sg", +searchMode = "analyzingInfixMatching", +sourceFields = new object[] +{ +"category", +"ownerId" +}, +} + }, + ["analyzers"] = Array.Empty(), + ["tokenizers"] = Array.Empty(), + ["tokenFilters"] = Array.Empty(), + ["charFilters"] = Array.Empty(), + ["normalizers"] = Array.Empty(), + ["semantic"] = new + { + defaultConfiguration = "testconfig", + configurations = new object[] + { +new +{ +name = "testconfig", +prioritizedFields = new +{ +titleField = new +{ +fieldName = "category", +}, +prioritizedContentFields = new object[] +{ +new +{ +fieldName = "description", +} +}, +prioritizedKeywordsFields = new object[] +{ +new +{ +fieldName = "ownerId", +} +}, +}, +flightingOptIn = true, +} + }, + }, + ["vectorSearch"] = new + { + profiles 
= new object[] + { +new +{ +name = "config1", +algorithm = "cosine", +vectorizer = "openai", +compression = "mySQ8", +}, +new +{ +name = "config2", +algorithm = "euclidean", +vectorizer = "custom-web-api", +compression = "mySQ8", +}, +new +{ +name = "config3", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQC", +}, +new +{ +name = "config4", +algorithm = "dotProduct", +vectorizer = "custom-web-api", +compression = "myBQWithoutOriginals", +} + }, + algorithms = new object[] + { +new +{ +hnswParameters = new +{ +metric = "cosine", +}, +name = "cosine", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "euclidean", +}, +name = "euclidean", +kind = "hnsw", +}, +new +{ +hnswParameters = new +{ +metric = "dotProduct", +}, +name = "dotProduct", +kind = "hnsw", +} + }, + vectorizers = new object[] + { +new +{ +azureOpenAIParameters = new +{ +resourceUri = "https://test-sample.openai.azure.com/", +deploymentId = "model", +apiKey = "api-key", +modelName = "text-embedding-3-large", +}, +name = "openai", +kind = "azureOpenAI", +}, +new +{ +customWebApiParameters = new +{ +uri = "https://my-custom-endpoint.org/", +httpHeaders = new +{ +header1 = "value1", +header2 = "value2", +}, +httpMethod = "POST", +timeout = "PT1M", +authResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +name = "custom-web-api", +kind = "customWebApi", +}, +new +{ +amlParameters = new +{ +uri = "https://my-custom-endpoint.org/", +resourceId = "aml resource id", +timeout = "PT1M", +region = "aml region", +modelName = "OpenAI-CLIP-Image-Text-Embeddings-vit-base-patch32", +}, +name = "aml", +kind = "aml", +} + }, + compressions = new object[] + { +new +{ +scalarQuantizationParameters = new +{ +quantizedDataType = "int8", +}, +name = "mySQ8", +kind = "scalarQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQC", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "preserveOriginals", +}, +truncationDimension = 2, +}, +new +{ +name = "myBQWithoutOriginals", +kind = "binaryQuantization", +rescoringOptions = new +{ +enableRescoring = true, +defaultOversampling = 10, +rescoreStorageMethod = "discardOriginals", +}, +truncationDimension = 2, +} + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = await client.CreateOrUpdateAsync("temp-preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_CreateOrUpdate_SearchServiceCreateOrUpdateIndex_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] + { +new SearchField("id", SearchFieldDataType.String) +{ +Key = true, +Sortable = true, +}, +new SearchField("vector1", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 20, +VectorSearchProfileName = "config1", +}, +new 
SearchField("vector1b", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new SearchField("vector2", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector3", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector22", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new SearchField("vector4", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 32, +VectorSearchProfileName = "config4", +}, +new SearchField("name", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("description", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.StandardLucene, +}, +new SearchField("category", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("ownerId", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("price", SearchFieldDataType.Double) +{ +Retrievable = true, +Filterable = true, +Sortable = true, +Facetable = true, +} + }) + { + ScoringProfiles = {new ScoringProfile("stringFieldBoost") +{ +TextWeights = new TextWeights(new Dictionary +{ +["name"] = 3, +["description"] = 1, +["category"] = 2, +["ownerId"] = 1 +}), +Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, +}}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields +{ +TitleField = new SemanticField("category"), +ContentFields = {new SemanticField("description")}, +KeywordsFields = {new SemanticField("ownerId")}, +}) +{ +FlightingOptIn = true, +}}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") +{ +VectorizerName = "openai", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config2", "euclidean") +{ +VectorizerName = "custom-web-api", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config3", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQC", +}, new VectorSearchProfile("config4", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQWithoutOriginals", +}}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Cosine, +}, +}, new HnswAlgorithmConfiguration("euclidean") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Euclidean, +}, 
+}, new HnswAlgorithmConfiguration("dotProduct") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.DotProduct, +}, +}}, + Vectorizers = {new AzureOpenAIVectorizer("openai") +{ +Parameters = new AzureOpenAIVectorizerParameters +{ +ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), +DeploymentName = "model", +ApiKey = "api-key", +ModelName = AzureOpenAIModelName.TextEmbedding3Large, +}, +}, new WebApiVectorizer("custom-web-api") +{ +WebApiParameters = new WebApiVectorizerParameters +{ +Url = new Uri("https://my-custom-endpoint.org/"), +HttpHeaders = +{ +["header1"] = "value1", +["header2"] = "value2" +}, +HttpMethod = "POST", +Timeout = XmlConvert.ToTimeSpan("PT1M"), +AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +}, new AzureMachineLearningVectorizer("aml") +{ +AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) +{ +ResourceId = "aml resource id", +Timeout = XmlConvert.ToTimeSpan("PT1M"), +Region = "aml region", +ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, +}, +}}, + Compressions = {new ScalarQuantizationCompression("mySQ8") +{ +Parameters = new ScalarQuantizationParameters +{ +QuantizedDataType = VectorSearchCompressionTarget.Int8, +}, +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQC") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQWithoutOriginals") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, +}, +TruncationDimension = 2, +}}, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = client.CreateOrUpdate("temp-preview-test", index); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_CreateOrUpdate_SearchServiceCreateOrUpdateIndex_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + SearchIndex index = new SearchIndex("temp-preview-test", new SearchField[] + { +new SearchField("id", SearchFieldDataType.String) +{ +Key = true, +Sortable = true, +}, +new SearchField("vector1", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 20, +VectorSearchProfileName = "config1", +}, +new SearchField("vector1b", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new SearchField("vector2", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector3", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 5, +VectorSearchProfileName = "config3", +}, +new SearchField("vector22", default) +{ +Retrievable = true, +Searchable = true, +VectorSearchDimensions = 10, +VectorSearchProfileName = "config2", +}, +new SearchField("vector4", default) +{ +Retrievable = true, +Searchable = true, 
+VectorSearchDimensions = 32, +VectorSearchProfileName = "config4", +}, +new SearchField("name", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("description", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.StandardLucene, +}, +new SearchField("category", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("ownerId", SearchFieldDataType.String) +{ +Retrievable = true, +Searchable = true, +Filterable = true, +Sortable = true, +Facetable = true, +Analyzer = LexicalAnalyzerName.EnLucene, +}, +new SearchField("price", SearchFieldDataType.Double) +{ +Retrievable = true, +Filterable = true, +Sortable = true, +Facetable = true, +} + }) + { + ScoringProfiles = {new ScoringProfile("stringFieldBoost") +{ +TextWeights = new TextWeights(new Dictionary +{ +["name"] = 3, +["description"] = 1, +["category"] = 2, +["ownerId"] = 1 +}), +Functions = {new TagScoringFunction("category", 2, new TagScoringParameters("categoryTag"))}, +}}, + DefaultScoringProfile = "stringFieldBoost", + CorsOptions = new CorsOptions(new string[] { "https://www.example.com/foo" }) + { + MaxAgeInSeconds = 10L, + }, + Suggesters = { new SearchSuggester("sg", new string[] { "category", "ownerId" }) }, + Analyzers = { }, + Tokenizers = { }, + TokenFilters = { }, + CharFilters = { }, + Normalizers = { }, + SemanticSearch = new SemanticSearch + { + DefaultConfigurationName = "testconfig", + Configurations = {new SemanticConfiguration("testconfig", new SemanticPrioritizedFields +{ +TitleField = new SemanticField("category"), +ContentFields = {new SemanticField("description")}, +KeywordsFields = {new SemanticField("ownerId")}, +}) +{ +FlightingOptIn = true, +}}, + }, + VectorSearch = new VectorSearch + { + Profiles = {new VectorSearchProfile("config1", "cosine") +{ +VectorizerName = "openai", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config2", "euclidean") +{ +VectorizerName = "custom-web-api", +CompressionName = "mySQ8", +}, new VectorSearchProfile("config3", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQC", +}, new VectorSearchProfile("config4", "dotProduct") +{ +VectorizerName = "custom-web-api", +CompressionName = "myBQWithoutOriginals", +}}, + Algorithms = {new HnswAlgorithmConfiguration("cosine") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Cosine, +}, +}, new HnswAlgorithmConfiguration("euclidean") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.Euclidean, +}, +}, new HnswAlgorithmConfiguration("dotProduct") +{ +Parameters = new HnswParameters +{ +Metric = VectorSearchAlgorithmMetric.DotProduct, +}, +}}, + Vectorizers = {new AzureOpenAIVectorizer("openai") +{ +Parameters = new AzureOpenAIVectorizerParameters +{ +ResourceUrl = new Uri("https://test-sample.openai.azure.com/"), +DeploymentName = "model", +ApiKey = "api-key", +ModelName = AzureOpenAIModelName.TextEmbedding3Large, +}, +}, new WebApiVectorizer("custom-web-api") +{ +WebApiParameters = new WebApiVectorizerParameters +{ +Url = new Uri("https://my-custom-endpoint.org/"), +HttpHeaders = +{ +["header1"] = "value1", +["header2"] = "value2" +}, +HttpMethod = "POST", +Timeout = 
XmlConvert.ToTimeSpan("PT1M"), +AuthResourceId = "api://f89d1c93-58a7-4b07-9a5b-5f89048b927b", +}, +}, new AzureMachineLearningVectorizer("aml") +{ +AMLParameters = new AzureMachineLearningParameters(new Uri("https://my-custom-endpoint.org/")) +{ +ResourceId = "aml resource id", +Timeout = XmlConvert.ToTimeSpan("PT1M"), +Region = "aml region", +ModelName = AIFoundryModelCatalogName.OpenAICLIPImageTextEmbeddingsVitBasePatch32, +}, +}}, + Compressions = {new ScalarQuantizationCompression("mySQ8") +{ +Parameters = new ScalarQuantizationParameters +{ +QuantizedDataType = VectorSearchCompressionTarget.Int8, +}, +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQC") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.PreserveOriginals, +}, +TruncationDimension = 2, +}, new BinaryQuantizationCompression("myBQWithoutOriginals") +{ +RescoringOptions = new RescoringOptions +{ +EnableRescoring = true, +DefaultOversampling = 10, +RescoreStorageMethod = VectorSearchCompressionRescoreStorageMethod.DiscardOriginals, +}, +TruncationDimension = 2, +}}, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = await client.CreateOrUpdateAsync("temp-preview-test", index); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_Delete_SearchServiceDeleteIndex() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = client.Delete("temp-preview-test"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_Delete_SearchServiceDeleteIndex_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = await client.DeleteAsync("temp-preview-test"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_GetIndex_SearchServiceGetIndex() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = client.GetIndex("preview-test", (RequestContext)null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_GetIndex_SearchServiceGetIndex_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = await client.GetIndexAsync("preview-test", (RequestContext)null); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_GetIndex_SearchServiceGetIndex_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = client.GetIndex("preview-test"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_GetIndex_SearchServiceGetIndex_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = await client.GetIndexAsync("preview-test"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_GetStatistics_SearchServiceGetIndexStatistics() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = client.GetStatistics("preview-test", (RequestContext)null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("documentCount").ToString()); + Console.WriteLine(result.GetProperty("storageSize").ToString()); + Console.WriteLine(result.GetProperty("vectorIndexSize").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_GetStatistics_SearchServiceGetIndexStatistics_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = await client.GetStatisticsAsync("preview-test", (RequestContext)null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("documentCount").ToString()); + Console.WriteLine(result.GetProperty("storageSize").ToString()); + Console.WriteLine(result.GetProperty("vectorIndexSize").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_GetStatistics_SearchServiceGetIndexStatistics_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = client.GetStatistics("preview-test"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_GetStatistics_SearchServiceGetIndexStatistics_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Response response = await client.GetStatisticsAsync("preview-test"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_Analyze_SearchServiceIndexAnalyze() + { + Uri endpoint = new Uri(""); + 
AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + using RequestContent content = RequestContent.Create(new + { + text = "Text to analyze", + analyzer = "ar.lucene", + }); + Response response = client.Analyze("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("token").ToString()); + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("startOffset").ToString()); + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("endOffset").ToString()); + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("position").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_Analyze_SearchServiceIndexAnalyze_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + using RequestContent content = RequestContent.Create(new + { + text = "Text to analyze", + analyzer = "ar.lucene", + }); + Response response = await client.AnalyzeAsync("preview-test", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("token").ToString()); + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("startOffset").ToString()); + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("endOffset").ToString()); + Console.WriteLine(result.GetProperty("tokens")[0].GetProperty("position").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_Analyze_SearchServiceIndexAnalyze_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Search.Documents.Indexes.Models.AnalyzeTextOptions request = new Search.Documents.Indexes.Models.AnalyzeTextOptions("Text to analyze"); + Response response = client.Analyze("preview-test", request); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_Analyze_SearchServiceIndexAnalyze_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + Search.Documents.Indexes.Models.AnalyzeTextOptions request = new Search.Documents.Indexes.Models.AnalyzeTextOptions("Text to analyze"); + Response response = await client.AnalyzeAsync("preview-test", request); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_GetIndexes_SearchServiceListIndexes() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + foreach (BinaryData item in client.GetIndexes((string)null, (RequestContext)null)) + { + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + 
Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_GetIndexes_SearchServiceListIndexes_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + await foreach (BinaryData item in client.GetIndexesAsync((string)null, (RequestContext)null)) + { + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("fields")[0].GetProperty("type").ToString()); + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Indexes_GetIndexes_SearchServiceListIndexes_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + foreach (SearchIndex item in client.GetIndexes()) + { + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Indexes_GetIndexes_SearchServiceListIndexes_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Search.Documents.Indexes client = new SearchClient(endpoint, credential).GetIndexesClient(); + + await foreach (SearchIndex item in client.GetIndexesAsync()) + { + } + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_SearchClient.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_SearchClient.cs new file mode 100644 index 000000000000..689d08548913 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_SearchClient.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_SearchClient + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Search_GetServiceStatistics_SearchServiceGetServiceStatistics() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + Response response = client.GetServiceStatistics(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("counters").GetProperty("aliasesCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("documentCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("indexesCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("indexersCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("dataSourcesCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("storageSize").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("synonymMaps").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("skillsetCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("vectorIndexSize").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("limits").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Search_GetServiceStatistics_SearchServiceGetServiceStatistics_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + Response response = await client.GetServiceStatisticsAsync(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("counters").GetProperty("aliasesCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("documentCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("indexesCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("indexersCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("dataSourcesCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("storageSize").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("synonymMaps").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("skillsetCount").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("counters").GetProperty("vectorIndexSize").GetProperty("usage").ToString()); + Console.WriteLine(result.GetProperty("limits").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Search_GetServiceStatistics_SearchServiceGetServiceStatistics_Convenience() + { + Uri endpoint = new Uri(""); + 
AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + Response response = client.GetServiceStatistics(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Search_GetServiceStatistics_SearchServiceGetServiceStatistics_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + Response response = await client.GetServiceStatisticsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Search_GetIndexStatsSummary_SearchServiceGetIndexStatsSummary() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + foreach (BinaryData item in client.GetIndexStatsSummary(null)) + { + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("documentCount").ToString()); + Console.WriteLine(result.GetProperty("storageSize").ToString()); + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Search_GetIndexStatsSummary_SearchServiceGetIndexStatsSummary_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + await foreach (BinaryData item in client.GetIndexStatsSummaryAsync(null)) + { + JsonElement result = JsonDocument.Parse(item.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("documentCount").ToString()); + Console.WriteLine(result.GetProperty("storageSize").ToString()); + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Search_GetIndexStatsSummary_SearchServiceGetIndexStatsSummary_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + foreach (IndexStatisticsSummary item in client.GetIndexStatsSummary()) + { + } + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Search_GetIndexStatsSummary_SearchServiceGetIndexStatsSummary_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SearchClient client = new SearchClient(endpoint, credential); + + await foreach (IndexStatisticsSummary item in client.GetIndexStatsSummaryAsync()) + { + } + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Skillsets.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Skillsets.cs new file mode 100644 index 000000000000..11b8c581fa3e --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_Skillsets.cs @@ -0,0 +1,463 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using Azure.Search.Documents.Models; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_Skillsets + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_CreateOrUpdate_SearchServiceCreateOrUpdateSkillset() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = client.CreateOrUpdate("tempskillset", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_CreateOrUpdate_SearchServiceCreateOrUpdateSkillset_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = await client.CreateOrUpdateAsync("tempskillset", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_CreateOrUpdate_SearchServiceCreateOrUpdateSkillset_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, 
credential).GetSkillsetsClient(); + + SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) + { + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = client.CreateOrUpdate("tempskillset", skillset); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_CreateOrUpdate_SearchServiceCreateOrUpdateSkillset_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) + { + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = await client.CreateOrUpdateAsync("tempskillset", skillset); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_Delete_SearchServiceDeleteSkillset() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = client.Delete("tempskillset"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_Delete_SearchServiceDeleteSkillset_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = await client.DeleteAsync("tempskillset"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_GetSkillset_SearchServiceGetSkillset() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = client.GetSkillset("myskillset", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_GetSkillset_SearchServiceGetSkillset_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = await client.GetSkillsetAsync("myskillset", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_GetSkillset_SearchServiceGetSkillset_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = client.GetSkillset("myskillset"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_GetSkillset_SearchServiceGetSkillset_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = await client.GetSkillsetAsync("myskillset"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_GetSkillsets_SearchServiceListSkillsets() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = client.GetSkillsets(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_GetSkillsets_SearchServiceListSkillsets_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = await client.GetSkillsetsAsync(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_GetSkillsets_SearchServiceListSkillsets_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = client.GetSkillsets(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_GetSkillsets_SearchServiceListSkillsets_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + Response response = await client.GetSkillsetsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_Create_SearchServiceCreateSkillset() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = client.Create(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_Create_SearchServiceCreateSkillset_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempskillset", + ["description"] = "Skillset for extracting entities and more", + ["skills"] = Array.Empty(), + ["@odata.etag"] = "0x1234568AE7E58A1", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + } + }); + Response response = await client.CreateAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("@odata.type").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("inputs")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("skills")[0].GetProperty("outputs")[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_Create_SearchServiceCreateSkillset_Convenience() + { + Uri 
endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) + { + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = client.Create(skillset); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_Create_SearchServiceCreateSkillset_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + SearchIndexerSkillset skillset = new SearchIndexerSkillset("tempskillset", Array.Empty()) + { + Description = "Skillset for extracting entities and more", + ETag = "0x1234568AE7E58A1", + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + }; + Response response = await client.CreateAsync(skillset); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_ResetSkills_SearchServiceResetSkills() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + using RequestContent content = RequestContent.Create(new + { + skillNames = new object[] + { +"skill2", +"skill3", +"skill4" + }, + }); + Response response = client.ResetSkills("myskillset", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_ResetSkills_SearchServiceResetSkills_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + using RequestContent content = RequestContent.Create(new + { + skillNames = new object[] + { +"skill2", +"skill3", +"skill4" + }, + }); + Response response = await client.ResetSkillsAsync("myskillset", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_Skillsets_ResetSkills_SearchServiceResetSkills_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + ResetSkillsOptions resetSkillsOptions = null; + Response response = client.ResetSkills("myskillset", resetSkillsOptions); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_Skillsets_ResetSkills_SearchServiceResetSkills_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + Skillsets client = new SearchClient(endpoint, credential).GetSkillsetsClient(); + + ResetSkillsOptions resetSkillsOptions = null; + Response response = await client.ResetSkillsAsync("myskillset", resetSkillsOptions); + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_SynonymMaps.cs b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_SynonymMaps.cs new file mode 100644 index 000000000000..2e88414c3485 --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tests/Generated/Samples/Samples_SynonymMaps.cs @@ -0,0 +1,382 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.Search.Documents.Samples +{ + public partial class Samples_SynonymMaps + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_CreateOrUpdate_SearchServiceCreateOrUpdateSynonymMap() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "mysynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. => WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = client.CreateOrUpdate("mysynonymmap", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_CreateOrUpdate_SearchServiceCreateOrUpdateSynonymMap_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "mysynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. 
=> WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = await client.CreateOrUpdateAsync("mysynonymmap", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_CreateOrUpdate_SearchServiceCreateOrUpdateSynonymMap_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + SynonymMap synonymMap = new SynonymMap("mysynonymmap", "United States, United States of America, USA\nWashington, Wash. => WA") + { + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = client.CreateOrUpdate("mysynonymmap", synonymMap); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_CreateOrUpdate_SearchServiceCreateOrUpdateSynonymMap_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + SynonymMap synonymMap = new SynonymMap("mysynonymmap", "United States, United States of America, USA\nWashington, Wash. 
=> WA") + { + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = await client.CreateOrUpdateAsync("mysynonymmap", synonymMap); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_Delete_SearchServiceDeleteSynonymMap() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = client.Delete("tempsynonymmap"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_Delete_SearchServiceDeleteSynonymMap_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = await client.DeleteAsync("tempsynonymmap"); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_GetSynonymMap_SearchServiceGetSynonymMap() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = client.GetSynonymMap("mysynonymmap", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_GetSynonymMap_SearchServiceGetSynonymMap_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = await client.GetSynonymMapAsync("mysynonymmap", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_GetSynonymMap_SearchServiceGetSynonymMap_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = client.GetSynonymMap("mysynonymmap"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_GetSynonymMap_SearchServiceGetSynonymMap_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = await client.GetSynonymMapAsync("mysynonymmap"); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_SynonymMaps_GetSynonymMaps_SearchServiceListSynonymMaps() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = client.GetSynonymMaps(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_GetSynonymMaps_SearchServiceListSynonymMaps_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = await client.GetSynonymMapsAsync(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("value")[0].GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("value")[0].GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_GetSynonymMaps_SearchServiceListSynonymMaps_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = client.GetSynonymMaps(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_GetSynonymMaps_SearchServiceListSynonymMaps_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + Response response = await client.GetSynonymMapsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_Create_SearchServiceCreateSynonymMap() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempsynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. 
=> WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = client.Create(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_Create_SearchServiceCreateSynonymMap_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + using RequestContent content = RequestContent.Create(new Dictionary + { + ["name"] = "tempsynonymmap", + ["format"] = "solr", + ["synonyms"] = "United States, United States of America, USA\nWashington, Wash. => WA", + ["encryptionKey"] = new + { + keyVaultKeyName = "myUserManagedEncryptionKey-createdinAzureKeyVault", + keyVaultKeyVersion = "myKeyVersion-32charAlphaNumericString", + keyVaultUri = "https://myKeyVault.vault.azure.net", + accessCredentials = new + { + applicationId = "00000000-0000-0000-0000-000000000000", + applicationSecret = "", + }, + }, + ["@odata.etag"] = "0x1234568AE7E58A1" + }); + Response response = await client.CreateAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("format").ToString()); + Console.WriteLine(result.GetProperty("synonyms").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_SynonymMaps_Create_SearchServiceCreateSynonymMap_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + SynonymMap synonymMap = new SynonymMap("tempsynonymmap", "United States, United States of America, USA\nWashington, Wash. => WA") + { + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = client.Create(synonymMap); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_SynonymMaps_Create_SearchServiceCreateSynonymMap_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + SynonymMaps client = new SearchClient(endpoint, credential).GetSynonymMapsClient(); + + SynonymMap synonymMap = new SynonymMap("tempsynonymmap", "United States, United States of America, USA\nWashington, Wash. 
=> WA") + { + EncryptionKey = new SearchResourceEncryptionKey("myUserManagedEncryptionKey-createdinAzureKeyVault", "https://myKeyVault.vault.azure.net") + { + KeyVersion = "myKeyVersion-32charAlphaNumericString", + AccessCredentials = new AzureActiveDirectoryApplicationCredentials("00000000-0000-0000-0000-000000000000") + { + ApplicationSecret = "", + }, + }, + ETag = "0x1234568AE7E58A1", + }; + Response response = await client.CreateAsync(synonymMap); + } + } +} diff --git a/sdk/search/Azure.Search.Documents/tsp-location.yaml b/sdk/search/Azure.Search.Documents/tsp-location.yaml new file mode 100644 index 000000000000..e07f50a4805b --- /dev/null +++ b/sdk/search/Azure.Search.Documents/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/search/Azure.Search +commit: 7974890cb2530ff3ba71fb3ecaa5d76f0d9ed85d +repo: Azure/azure-rest-api-specs +additionalDirectories: