diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs index fdc44b3da4e3..d1ff0bc7e195 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs @@ -12,9 +12,34 @@ namespace Microsoft.Extensions.Azure { - /// Extension methods to add , to client builder. + /// Extension methods to add , , to client builder. public static partial class AIVisionFaceClientBuilderExtensions { + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, Uri endpoint, AzureKeyCredential credential) + where TBuilder : IAzureClientFactoryBuilder + { + return builder.RegisterClientFactory((options) => new FaceAdministrationClient(endpoint, credential, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, Uri endpoint) + where TBuilder : IAzureClientFactoryBuilderWithCredential + { + return builder.RegisterClientFactory((options, cred) => new FaceAdministrationClient(endpoint, cred, options)); + } + /// Registers a instance. /// The builder to register with. /// @@ -65,6 +90,14 @@ public static IAzureClientBuilder((options, cred) => new FaceSessionClient(endpoint, cred, options)); } + /// Registers a instance. + /// The builder to register with. + /// The configuration values. + public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, TConfiguration configuration) + where TBuilder : IAzureClientFactoryBuilderWithConfiguration + { + return builder.RegisterClientFactory(configuration); + } /// Registers a instance. /// The builder to register with. /// The configuration values. diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs index f85eca044688..49e4e7fad35a 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs @@ -14,6 +14,93 @@ namespace Azure.AI.Vision.Face /// Model factory for models. public static partial class AIVisionFaceModelFactory { + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// ID of the container. + /// A new instance for mocking. + public static LargePersonGroup LargePersonGroup(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largePersonGroupId = null) + { + return new LargePersonGroup(name, userData, recognitionModel, largePersonGroupId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Training status of the container. 
+ /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. + /// Show failure message when training failed (omitted when training succeed). + /// A new instance for mocking. + public static FaceTrainingResult FaceTrainingResult(FaceOperationStatus status = default, DateTimeOffset createdDateTime = default, DateTimeOffset lastActionDateTime = default, DateTimeOffset lastSuccessfulTrainingDateTime = default, string message = null) + { + return new FaceTrainingResult( + status, + createdDateTime, + lastActionDateTime, + lastSuccessfulTrainingDateTime, + message, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Person ID of the person. + /// A new instance for mocking. + public static CreatePersonResult CreatePersonResult(Guid personId = default) + { + return new CreatePersonResult(personId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// ID of the person. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Face ids of registered faces in the person. + /// A new instance for mocking. + public static LargePersonGroupPerson LargePersonGroupPerson(Guid personId = default, string name = null, string userData = null, IEnumerable persistedFaceIds = null) + { + persistedFaceIds ??= new List(); + + return new LargePersonGroupPerson(personId, name, userData, persistedFaceIds?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + /// A new instance for mocking. + public static AddFaceResult AddFaceResult(Guid persistedFaceId = default) + { + return new AddFaceResult(persistedFaceId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// A new instance for mocking. + public static LargePersonGroupPersonFace LargePersonGroupPersonFace(Guid persistedFaceId = default, string userData = null) + { + return new LargePersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// A new instance for mocking. + public static LargeFaceList LargeFaceList(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largeFaceListId = null) + { + return new LargeFaceList(name, userData, recognitionModel, largeFaceListId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. 
The length limit is 1K. + /// A new instance for mocking. + public static LargeFaceListFace LargeFaceListFace(Guid persistedFaceId = default, string userData = null) + { + return new LargeFaceListFace(persistedFaceId, userData, serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true. /// The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true. @@ -277,19 +364,43 @@ public static FaceGroupingResult FaceGroupingResult(IEnumerable> gro return new FaceGroupingResult(groups?.ToList(), messyGroup?.ToList(), serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// A new instance for mocking. + public static FaceIdentificationResult FaceIdentificationResult(Guid faceId = default, IEnumerable candidates = null) + { + candidates ??= new List(); + + return new FaceIdentificationResult(faceId, candidates?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// personId of candidate person. + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + /// A new instance for mocking. + public static FaceIdentificationCandidate FaceIdentificationCandidate(Guid personId = default, float confidence = default) + { + return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// Type of liveness mode the client should follow. /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not store the session image. + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. /// A new instance for mocking. - public static CreateLivenessSessionContent CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null) + public static CreateLivenessSessionContent CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, bool? enableSessionImage = null, LivenessModel? 
livenessSingleModalModel = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null) { return new CreateLivenessSessionContent( livenessOperationMode, sendResultsToClient, deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, deviceCorrelationId, authTokenTimeToLiveInSeconds, serializedAdditionalRawData: null); @@ -337,8 +448,10 @@ public static LivenessSession LivenessSession(string id = null, DateTimeOffset c /// The request of this entry. /// The response of this entry. /// The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. + /// The image ID of the session request. + /// The sha256 hash of the verify-image in the request. /// A new instance for mocking. - public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = default, string sessionId = null, string requestId = null, string clientRequestId = null, DateTimeOffset receivedDateTime = default, AuditRequestInfo request = null, AuditLivenessResponseInfo response = null, string digest = null) + public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = default, string sessionId = null, string requestId = null, string clientRequestId = null, DateTimeOffset receivedDateTime = default, AuditRequestInfo request = null, AuditLivenessResponseInfo response = null, string digest = null, string sessionImageId = null, string verifyImageHash = null) { return new LivenessSessionAuditEntry( id, @@ -349,6 +462,8 @@ public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = defa request, response, digest, + sessionImageId, + verifyImageHash, serializedAdditionalRawData: null); } @@ -444,6 +559,32 @@ public static LivenessSessionItem LivenessSessionItem(string id = null, DateTime serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// Type of liveness mode the client should follow. + /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. + /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not store the session image. + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// Whether or not return the verify image hash. + /// Threshold for confidence of the face verification. + /// A new instance for mocking. + public static CreateLivenessWithVerifySessionContent CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? 
deviceCorrelationIdSetInClient = null, bool? enableSessionImage = null, LivenessModel? livenessSingleModalModel = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null, bool? returnVerifyImageHash = null, float? verifyConfidenceThreshold = null) + { + return new CreateLivenessWithVerifySessionContent( + livenessOperationMode, + sendResultsToClient, + deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + returnVerifyImageHash, + verifyConfidenceThreshold, + serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. /// Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs new file mode 100644 index 000000000000..4068e6fba19a --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Uri.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddFaceFromUrlRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddFaceFromUrlRequest(document.RootElement, options); + } + + internal static AddFaceFromUrlRequest DeserializeAddFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddFaceFromUrlRequest(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support writing '{options.Format}' format."); + } + } + + AddFaceFromUrlRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddFaceFromUrlRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddFaceFromUrlRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddFaceFromUrlRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs new file mode 100644 index 000000000000..f6d64f1102d7 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddFaceFromUrlRequest. + internal partial class AddFaceFromUrlRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . 
+ /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + internal AddFaceFromUrlRequest(Uri uri) + { + Argument.AssertNotNull(uri, nameof(uri)); + + Uri = uri; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddFaceFromUrlRequest(Uri uri, IDictionary serializedAdditionalRawData) + { + Uri = uri; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddFaceFromUrlRequest() + { + } + + /// URL of input image. + public Uri Uri { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs new file mode 100644 index 000000000000..6afe63d8fb43 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddFaceFromUrlRequest1 : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Uri.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddFaceFromUrlRequest1 IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddFaceFromUrlRequest1(document.RootElement, options); + } + + internal static AddFaceFromUrlRequest1 DeserializeAddFaceFromUrlRequest1(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddFaceFromUrlRequest1(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support writing '{options.Format}' format."); + } + } + + AddFaceFromUrlRequest1 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddFaceFromUrlRequest1(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddFaceFromUrlRequest1 FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddFaceFromUrlRequest1(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs new file mode 100644 index 000000000000..2bce74bdbf08 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddFaceFromUrlRequest1. + internal partial class AddFaceFromUrlRequest1 + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . 
+ /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + internal AddFaceFromUrlRequest1(Uri uri) + { + Argument.AssertNotNull(uri, nameof(uri)); + + Uri = uri; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddFaceFromUrlRequest1(Uri uri, IDictionary serializedAdditionalRawData) + { + Uri = uri; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddFaceFromUrlRequest1() + { + } + + /// URL of input image. + public Uri Uri { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs new file mode 100644 index 000000000000..716408b25a91 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class AddFaceResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddFaceResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddFaceResult(document.RootElement, options); + } + + internal static AddFaceResult DeserializeAddFaceResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddFaceResult(persistedFaceId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{options.Format}' format."); + } + } + + AddFaceResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddFaceResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddFaceResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddFaceResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs new file mode 100644 index 000000000000..28ad972fc9d5 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response body for adding face. + public partial class AddFaceResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + internal AddFaceResult(Guid persistedFaceId) + { + PersistedFaceId = persistedFaceId; + } + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + /// Keeps track of any properties unknown to the library. + internal AddFaceResult(Guid persistedFaceId, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddFaceResult() + { + } + + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + public Guid PersistedFaceId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs index a8cf7ed227cc..a27f25f2a008 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs @@ -13,13 +13,15 @@ namespace Azure.AI.Vision.Face /// Client options for Azure.AI.Vision.Face library clients. public partial class AzureAIVisionFaceClientOptions : ClientOptions { - private const ServiceVersion LatestVersion = ServiceVersion.V1_1_Preview_1; + private const ServiceVersion LatestVersion = ServiceVersion.V1_2_Preview_1; /// The version of the service to use. public enum ServiceVersion { /// Service version "v1.1-preview.1". V1_1_Preview_1 = 1, + /// Service version "v1.2-preview.1". 
+ V1_2_Preview_1 = 2, } internal string Version { get; } @@ -30,6 +32,7 @@ public AzureAIVisionFaceClientOptions(ServiceVersion version = LatestVersion) Version = version switch { ServiceVersion.V1_1_Preview_1 => "v1.1-preview.1", + ServiceVersion.V1_2_Preview_1 => "v1.2-preview.1", _ => throw new NotSupportedException() }; } diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs index 5890a4f74258..b32ad1822811 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs @@ -38,6 +38,16 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model writer.WritePropertyName("deviceCorrelationIdSetInClient"u8); writer.WriteBooleanValue(DeviceCorrelationIdSetInClient.Value); } + if (Optional.IsDefined(EnableSessionImage)) + { + writer.WritePropertyName("enableSessionImage"u8); + writer.WriteBooleanValue(EnableSessionImage.Value); + } + if (Optional.IsDefined(LivenessSingleModalModel)) + { + writer.WritePropertyName("livenessSingleModalModel"u8); + writer.WriteStringValue(LivenessSingleModalModel.Value.ToString()); + } if (Optional.IsDefined(DeviceCorrelationId)) { writer.WritePropertyName("deviceCorrelationId"u8); @@ -89,6 +99,8 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon LivenessOperationMode livenessOperationMode = default; bool? sendResultsToClient = default; bool? deviceCorrelationIdSetInClient = default; + bool? enableSessionImage = default; + LivenessModel? livenessSingleModalModel = default; string deviceCorrelationId = default; int? authTokenTimeToLiveInSeconds = default; IDictionary serializedAdditionalRawData = default; @@ -118,6 +130,24 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon deviceCorrelationIdSetInClient = property.Value.GetBoolean(); continue; } + if (property.NameEquals("enableSessionImage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableSessionImage = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("livenessSingleModalModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + livenessSingleModalModel = new LivenessModel(property.Value.GetString()); + continue; + } if (property.NameEquals("deviceCorrelationId"u8)) { deviceCorrelationId = property.Value.GetString(); @@ -142,6 +172,8 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon livenessOperationMode, sendResultsToClient, deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, deviceCorrelationId, authTokenTimeToLiveInSeconds, serializedAdditionalRawData); diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs index eb27333cbe93..9de38ee841c6 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs @@ -10,7 +10,7 @@ namespace Azure.AI.Vision.Face { - /// Request for creating liveness session. + /// Request model for creating liveness session. 
public partial class CreateLivenessSessionContent { /// @@ -56,14 +56,18 @@ public CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode) /// Type of liveness mode the client should follow. /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not store the session image. + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. /// Keeps track of any properties unknown to the library. - internal CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary serializedAdditionalRawData) + internal CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, bool? enableSessionImage, LivenessModel? livenessSingleModalModel, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary serializedAdditionalRawData) { LivenessOperationMode = livenessOperationMode; SendResultsToClient = sendResultsToClient; DeviceCorrelationIdSetInClient = deviceCorrelationIdSetInClient; + EnableSessionImage = enableSessionImage; + LivenessSingleModalModel = livenessSingleModalModel; DeviceCorrelationId = deviceCorrelationId; AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; _serializedAdditionalRawData = serializedAdditionalRawData; @@ -80,6 +84,10 @@ internal CreateLivenessSessionContent() public bool? SendResultsToClient { get; set; } /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. public bool? DeviceCorrelationIdSetInClient { get; set; } + /// Whether or not store the session image. + public bool? EnableSessionImage { get; set; } + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + public LivenessModel? LivenessSingleModalModel { get; set; } /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. public string DeviceCorrelationId { get; set; } /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. 
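
Editor's note: the new `enableSessionImage` and `livenessSingleModalModel` options added above are plain nullable, settable properties on `CreateLivenessSessionContent`, so callers opt in at session-creation time. A minimal sketch of populating the v1.2-preview.1 request model follows; `LivenessOperationMode.Passive` is assumed to be one of the extensible enum's known values (the member names are not shown in this diff), and the other values are illustrative.

```csharp
using System;
using Azure.AI.Vision.Face;

// Hedged sketch: build the v1.2-preview.1 liveness session request.
// LivenessOperationMode.Passive is assumed, not defined in this diff.
var content = new CreateLivenessSessionContent(LivenessOperationMode.Passive)
{
    DeviceCorrelationId = Guid.NewGuid().ToString(),
    AuthTokenTimeToLiveInSeconds = 600, // default per the doc comments above
    EnableSessionImage = true,          // new: ask the service to store the session image
    // LivenessSingleModalModel is optional; when omitted, the service picks the
    // latest supported model version, per the doc comment above.
};
```

Because both new members are optional, omitting them keeps the wire payload identical to v1.1-preview.1, which is why the serializer above only writes them under `Optional.IsDefined`.
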
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs index 6b713f68b180..443812ef9cf0 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs @@ -8,13 +8,12 @@ using System; using System.ClientModel.Primitives; using System.Collections.Generic; -using System.IO; using System.Text.Json; using Azure.Core; namespace Azure.AI.Vision.Face { - internal partial class CreateLivenessWithVerifySessionContent : IUtf8JsonSerializable, IJsonModel + public partial class CreateLivenessWithVerifySessionContent : IUtf8JsonSerializable, IJsonModel { void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); @@ -27,17 +26,48 @@ void IJsonModel.Write(Utf8JsonWriter wri } writer.WriteStartObject(); - writer.WritePropertyName("Parameters"u8); - writer.WriteObjectValue(Parameters, options); - writer.WritePropertyName("VerifyImage"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(global::System.BinaryData.FromStream(VerifyImage)); -#else - using (JsonDocument document = JsonDocument.Parse(BinaryData.FromStream(VerifyImage))) + writer.WritePropertyName("livenessOperationMode"u8); + writer.WriteStringValue(LivenessOperationMode.ToString()); + if (Optional.IsDefined(SendResultsToClient)) { - JsonSerializer.Serialize(writer, document.RootElement); + writer.WritePropertyName("sendResultsToClient"u8); + writer.WriteBooleanValue(SendResultsToClient.Value); + } + if (Optional.IsDefined(DeviceCorrelationIdSetInClient)) + { + writer.WritePropertyName("deviceCorrelationIdSetInClient"u8); + writer.WriteBooleanValue(DeviceCorrelationIdSetInClient.Value); + } + if (Optional.IsDefined(EnableSessionImage)) + { + writer.WritePropertyName("enableSessionImage"u8); + writer.WriteBooleanValue(EnableSessionImage.Value); + } + if (Optional.IsDefined(LivenessSingleModalModel)) + { + writer.WritePropertyName("livenessSingleModalModel"u8); + writer.WriteStringValue(LivenessSingleModalModel.Value.ToString()); + } + if (Optional.IsDefined(DeviceCorrelationId)) + { + writer.WritePropertyName("deviceCorrelationId"u8); + writer.WriteStringValue(DeviceCorrelationId); + } + if (Optional.IsDefined(AuthTokenTimeToLiveInSeconds)) + { + writer.WritePropertyName("authTokenTimeToLiveInSeconds"u8); + writer.WriteNumberValue(AuthTokenTimeToLiveInSeconds.Value); + } + if (Optional.IsDefined(ReturnVerifyImageHash)) + { + writer.WritePropertyName("returnVerifyImageHash"u8); + writer.WriteBooleanValue(ReturnVerifyImageHash.Value); + } + if (Optional.IsDefined(VerifyConfidenceThreshold)) + { + writer.WritePropertyName("verifyConfidenceThreshold"u8); + writer.WriteNumberValue(VerifyConfidenceThreshold.Value); } -#endif if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -76,20 +106,90 @@ internal static CreateLivenessWithVerifySessionContent DeserializeCreateLiveness { return null; } - CreateLivenessSessionContent parameters = default; - Stream verifyImage = default; + LivenessOperationMode livenessOperationMode = default; + bool? sendResultsToClient = default; + bool? deviceCorrelationIdSetInClient = default; + bool? enableSessionImage = default; + LivenessModel? 
livenessSingleModalModel = default; + string deviceCorrelationId = default; + int? authTokenTimeToLiveInSeconds = default; + bool? returnVerifyImageHash = default; + float? verifyConfidenceThreshold = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) { - if (property.NameEquals("Parameters"u8)) + if (property.NameEquals("livenessOperationMode"u8)) + { + livenessOperationMode = new LivenessOperationMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("sendResultsToClient"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sendResultsToClient = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("deviceCorrelationIdSetInClient"u8)) { - parameters = CreateLivenessSessionContent.DeserializeCreateLivenessSessionContent(property.Value, options); + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + deviceCorrelationIdSetInClient = property.Value.GetBoolean(); continue; } - if (property.NameEquals("VerifyImage"u8)) + if (property.NameEquals("enableSessionImage"u8)) { - verifyImage = BinaryData.FromString(property.Value.GetRawText()).ToStream(); + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableSessionImage = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("livenessSingleModalModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + livenessSingleModalModel = new LivenessModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("deviceCorrelationId"u8)) + { + deviceCorrelationId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authTokenTimeToLiveInSeconds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authTokenTimeToLiveInSeconds = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("returnVerifyImageHash"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + returnVerifyImageHash = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("verifyConfidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + verifyConfidenceThreshold = property.Value.GetSingle(); continue; } if (options.Format != "W") @@ -98,30 +198,17 @@ internal static CreateLivenessWithVerifySessionContent DeserializeCreateLiveness } } serializedAdditionalRawData = rawDataDictionary; - return new CreateLivenessWithVerifySessionContent(parameters, verifyImage, serializedAdditionalRawData); - } - - private BinaryData SerializeMultipart(ModelReaderWriterOptions options) - { - using MultipartFormDataRequestContent content = ToMultipartRequestContent(); - using MemoryStream stream = new MemoryStream(); - content.WriteTo(stream); - if (stream.Position > int.MaxValue) - { - return BinaryData.FromStream(stream); - } - else - { - return new BinaryData(stream.GetBuffer().AsMemory(0, (int)stream.Position)); - } - } - - internal virtual MultipartFormDataRequestContent ToMultipartRequestContent() - { - MultipartFormDataRequestContent content = new MultipartFormDataRequestContent(); - content.Add(ModelReaderWriter.Write(Parameters, ModelSerializationExtensions.WireOptions), "Parameters"); - content.Add(VerifyImage, "VerifyImage", "VerifyImage", "application/octet-stream"); - return content; + return new CreateLivenessWithVerifySessionContent( + livenessOperationMode, + 
sendResultsToClient, + deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + returnVerifyImageHash, + verifyConfidenceThreshold, + serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) @@ -132,8 +219,6 @@ BinaryData IPersistableModel.Write(Model { case "J": return ModelReaderWriter.Write(this, options); - case "MFD": - return SerializeMultipart(options); default: throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionContent)} does not support writing '{options.Format}' format."); } @@ -155,7 +240,7 @@ CreateLivenessWithVerifySessionContent IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "MFD"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; /// Deserializes the model from a raw response. /// The response to deserialize the model from. diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs index 5d6d724fdbda..99e890eaf7e3 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs @@ -7,12 +7,11 @@ using System; using System.Collections.Generic; -using System.IO; namespace Azure.AI.Vision.Face { - /// Request of liveness with verify session creation. - internal partial class CreateLivenessWithVerifySessionContent + /// Request for creating liveness with verify session. + public partial class CreateLivenessWithVerifySessionContent { /// /// Keeps track of any properties unknown to the library. /// /// To assign an object to the value of this property use . /// /// /// To assign an already formatted json string to this property use . /// /// /// Examples: /// /// /// BinaryData.FromObjectAsJson("foo") /// Creates a payload of "foo". /// /// /// BinaryData.FromString("\"foo\"") /// Creates a payload of "foo". /// /// /// BinaryData.FromObjectAsJson(new { key = "value" }) /// Creates a payload of { "key": "value" }. /// /// /// BinaryData.FromString("{\"key\": \"value\"}") /// Creates a payload of { "key": "value" }. /// /// /// /// private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The parameters for creating session. - /// The image stream for verify. Content-Disposition header field for this part must have filename. - /// or is null. - public CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage) + /// Type of liveness mode the client should follow. + public CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode) { - Argument.AssertNotNull(parameters, nameof(parameters)); - Argument.AssertNotNull(verifyImage, nameof(verifyImage)); - - Parameters = parameters; - VerifyImage = verifyImage; + LivenessOperationMode = livenessOperationMode; } /// Initializes a new instance of . - /// The parameters for creating session. - /// The image stream for verify. Content-Disposition header field for this part must have filename. + /// Type of liveness mode the client should follow. + /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false; clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always return a response body, enabling business logic to be implemented. + /// Whether or not to allow the client to set its own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not to store the session image. + /// The model version used for liveness classification.
This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + /// Unique Guid per end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// Whether or not to return the verify image hash. + /// Threshold for confidence of the face verification. /// Keeps track of any properties unknown to the library. - internal CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage, IDictionary serializedAdditionalRawData) + internal CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, bool? enableSessionImage, LivenessModel? livenessSingleModalModel, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, bool? returnVerifyImageHash, float? verifyConfidenceThreshold, IDictionary serializedAdditionalRawData) { - Parameters = parameters; - VerifyImage = verifyImage; + LivenessOperationMode = livenessOperationMode; + SendResultsToClient = sendResultsToClient; + DeviceCorrelationIdSetInClient = deviceCorrelationIdSetInClient; + EnableSessionImage = enableSessionImage; + LivenessSingleModalModel = livenessSingleModalModel; + DeviceCorrelationId = deviceCorrelationId; + AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; + ReturnVerifyImageHash = returnVerifyImageHash; + VerifyConfidenceThreshold = verifyConfidenceThreshold; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -75,9 +82,23 @@ internal CreateLivenessWithVerifySessionContent() { } - /// The parameters for creating session. - public CreateLivenessSessionContent Parameters { get; } - /// The image stream for verify. Content-Disposition header field for this part must have filename. - public Stream VerifyImage { get; } + /// Type of liveness mode the client should follow. + public LivenessOperationMode LivenessOperationMode { get; } + /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false; clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always return a response body, enabling business logic to be implemented. + public bool? SendResultsToClient { get; set; } + /// Whether or not to allow the client to set its own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + public bool? DeviceCorrelationIdSetInClient { get; set; } + /// Whether or not to store the session image. + public bool? EnableSessionImage { get; set; } + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + public LivenessModel? LivenessSingleModalModel { get; set; } + /// Unique Guid per end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + public string DeviceCorrelationId { get; set; } + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + public int?
AuthTokenTimeToLiveInSeconds { get; set; } + /// Whether or not to return the verify image hash. + public bool? ReturnVerifyImageHash { get; set; } + /// Threshold for confidence of the face verification. + public float? VerifyConfidenceThreshold { get; set; } } } diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs new file mode 100644 index 000000000000..cccaed59a390 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.IO; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateLivenessWithVerifySessionMultipartContent : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("Parameters"u8); + writer.WriteObjectValue(Parameters, options); + writer.WritePropertyName("VerifyImage"u8); +#if NET6_0_OR_GREATER + writer.WriteRawValue(global::System.BinaryData.FromStream(VerifyImage)); +#else + using (JsonDocument document = JsonDocument.Parse(BinaryData.FromStream(VerifyImage))) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLivenessWithVerifySessionMultipartContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ?
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement, options); + } + + internal static CreateLivenessWithVerifySessionMultipartContent DeserializeCreateLivenessWithVerifySessionMultipartContent(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + CreateLivenessWithVerifySessionContent parameters = default; + Stream verifyImage = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("Parameters"u8)) + { + parameters = CreateLivenessWithVerifySessionContent.DeserializeCreateLivenessWithVerifySessionContent(property.Value, options); + continue; + } + if (property.NameEquals("VerifyImage"u8)) + { + verifyImage = BinaryData.FromString(property.Value.GetRawText()).ToStream(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLivenessWithVerifySessionMultipartContent(parameters, verifyImage, serializedAdditionalRawData); + } + + private BinaryData SerializeMultipart(ModelReaderWriterOptions options) + { + using MultipartFormDataRequestContent content = ToMultipartRequestContent(); + using MemoryStream stream = new MemoryStream(); + content.WriteTo(stream); + if (stream.Position > int.MaxValue) + { + return BinaryData.FromStream(stream); + } + else + { + return new BinaryData(stream.GetBuffer().AsMemory(0, (int)stream.Position)); + } + } + + internal virtual MultipartFormDataRequestContent ToMultipartRequestContent() + { + MultipartFormDataRequestContent content = new MultipartFormDataRequestContent(); + content.Add(ModelReaderWriter.Write(Parameters, ModelSerializationExtensions.WireOptions), "Parameters"); + content.Add(VerifyImage, "VerifyImage", "VerifyImage", "application/octet-stream"); + return content; + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + case "MFD": + return SerializeMultipart(options); + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support writing '{options.Format}' format."); + } + } + + CreateLivenessWithVerifySessionMultipartContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "MFD"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLivenessWithVerifySessionMultipartContent FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs new file mode 100644 index 000000000000..972c14733528 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.IO; + +namespace Azure.AI.Vision.Face +{ + /// Request of liveness with verify session creation. + internal partial class CreateLivenessWithVerifySessionMultipartContent + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The parameters for creating the session. + /// The image stream for verification. The Content-Disposition header field for this part must have a filename. + /// or is null. + public CreateLivenessWithVerifySessionMultipartContent(CreateLivenessWithVerifySessionContent parameters, Stream verifyImage) + { + Argument.AssertNotNull(parameters, nameof(parameters)); + Argument.AssertNotNull(verifyImage, nameof(verifyImage)); + + Parameters = parameters; + VerifyImage = verifyImage; + } + + /// Initializes a new instance of . + /// The parameters for creating the session. + /// The image stream for verification. The Content-Disposition header field for this part must have a filename. + /// Keeps track of any properties unknown to the library.
+ internal CreateLivenessWithVerifySessionMultipartContent(CreateLivenessWithVerifySessionContent parameters, Stream verifyImage, IDictionary serializedAdditionalRawData) + { + Parameters = parameters; + VerifyImage = verifyImage; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLivenessWithVerifySessionMultipartContent() + { + } + + /// The parameters for creating the session. + public CreateLivenessWithVerifySessionContent Parameters { get; } + /// The image stream for verification. The Content-Disposition header field for this part must have a filename. + public Stream VerifyImage { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs new file mode 100644 index 000000000000..682c4f08d863 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreatePersonRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ?
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonRequest(document.RootElement, options); + } + + internal static CreatePersonRequest DeserializeCreatePersonRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonRequest(name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs new file mode 100644 index 000000000000..5f2cefbcce5b --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreatePersonRequest. 
+ internal partial class CreatePersonRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal CreatePersonRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal CreatePersonRequest(string name, string userData, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreatePersonRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs new file mode 100644 index 000000000000..e0afa23647b8 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class CreatePersonResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonResult(document.RootElement, options); + } + + internal static CreatePersonResult DeserializeCreatePersonResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonResult(personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonResult(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs new file mode 100644 index 000000000000..d2b993228dd6 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response of create person. + public partial class CreatePersonResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Person ID of the person. + internal CreatePersonResult(Guid personId) + { + PersonId = personId; + } + + /// Initializes a new instance of . + /// Person ID of the person. + /// Keeps track of any properties unknown to the library. + internal CreatePersonResult(Guid personId, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreatePersonResult() + { + } + + /// Person ID of the person. + public Guid PersonId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs new file mode 100644 index 000000000000..5dddb239ac5c --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateRequest(document.RootElement, options); + } + + internal static CreateRequest DeserializeCreateRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateRequest(name, userData, recognitionModel, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateRequest)} does not support writing '{options.Format}' format."); + } + } + + CreateRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs new file mode 100644 index 000000000000..28b037d301c9 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateRequest. + internal partial class CreateRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal CreateRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Keeps track of any properties unknown to the library. + internal CreateRequest(string name, string userData, FaceRecognitionModel? recognitionModel, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization.
+ internal CreateRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + public FaceRecognitionModel? RecognitionModel { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs new file mode 100644 index 000000000000..2d7461262254 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateRequest1 : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateRequest1)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateRequest1 IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateRequest1)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateRequest1(document.RootElement, options); + } + + internal static CreateRequest1 DeserializeCreateRequest1(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel?
recognitionModel = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateRequest1(name, userData, recognitionModel, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateRequest1)} does not support writing '{options.Format}' format."); + } + } + + CreateRequest1 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateRequest1(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateRequest1)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateRequest1 FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateRequest1(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs new file mode 100644 index 000000000000..6c354abd8419 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateRequest1. + internal partial class CreateRequest1 + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal CreateRequest1(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Keeps track of any properties unknown to the library. + internal CreateRequest1(string name, string userData, FaceRecognitionModel? recognitionModel, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateRequest1() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + public FaceRecognitionModel? RecognitionModel { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs new file mode 100644 index 000000000000..df96544d127a --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class DetectFromSessionImageRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ?
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("sessionImageId"u8); + writer.WriteStringValue(SessionImageId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + DetectFromSessionImageRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDetectFromSessionImageRequest(document.RootElement, options); + } + + internal static DetectFromSessionImageRequest DeserializeDetectFromSessionImageRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string sessionImageId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("sessionImageId"u8)) + { + sessionImageId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DetectFromSessionImageRequest(sessionImageId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support writing '{options.Format}' format."); + } + } + + DetectFromSessionImageRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeDetectFromSessionImageRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static DetectFromSessionImageRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeDetectFromSessionImageRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs new file mode 100644 index 000000000000..637773ec68ff --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The DetectFromSessionImageRequest. + internal partial class DetectFromSessionImageRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Id of session image. + /// is null. + internal DetectFromSessionImageRequest(string sessionImageId) + { + Argument.AssertNotNull(sessionImageId, nameof(sessionImageId)); + + SessionImageId = sessionImageId; + } + + /// Initializes a new instance of . + /// Id of session image. + /// Keeps track of any properties unknown to the library. + internal DetectFromSessionImageRequest(string sessionImageId, IDictionary serializedAdditionalRawData) + { + SessionImageId = sessionImageId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal DetectFromSessionImageRequest() + { + } + + /// Id of session image. + public string SessionImageId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml index 14e05d4067a3..7568dc79da64 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml @@ -213,6 +213,208 @@ Response response = client.Group(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceListAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", +}); +Response response = await client.FindSimilarFromLargeFaceListAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceList and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", +}); +Response response = client.FindSimilarFromLargeFaceList(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroupAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "c5c24a82-6845-4031-9d5d-978df9175426" + }, + largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7, +}); +Response response = await client.IdentifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "c5c24a82-6845-4031-9d5d-978df9175426" + }, + largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7, +}); +Response response = client.IdentifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromLargePersonGroup(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", +}); +Response response = await client.VerifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroup and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", +}); +Response response = client.VerifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); ]]> diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml index 3ab65974a572..3754a0217479 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml @@ -497,6 +497,120 @@ Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call DetectFromSessionImageAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.DetectFromSessionImageAsync("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); +]]> + + + +This sample shows how to call DetectFromSessionImage. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.DetectFromSessionImage("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); +]]> + + + +This sample shows how to call DetectFromSessionImageAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", +}); +Response response = await client.DetectFromSessionImageAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); +]]> + + + +This sample shows how to call DetectFromSessionImage and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", +}); +Response response = client.DetectFromSessionImage(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); +]]> + + + +This sample shows how to call GetSessionImageAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa"); +]]> + + + +This sample shows how to call GetSessionImage. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa"); +]]> + + + +This sample shows how to call GetSessionImageAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> + + + +This sample shows how to call GetSessionImage and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); ]]> diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClientImpl.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClientImpl.xml new file mode 100644 index 000000000000..a1da24478c39 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClientImpl.xml @@ -0,0 +1,469 @@ + + + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.CreateAsync("your_large_face_list_name"); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.Create("your_large_face_list_name"); +]]> + + + +This sample shows how to call CreateAsync. 
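
The protocol overload accepts any RequestContent, so the anonymous-object body used in the generated sample below can equally be supplied as raw JSON. A hedged sketch, with the field names copied from the sample and placeholder endpoint and key:

Uri endpoint = new Uri("https://your-resource-name.cognitiveservices.azure.com"); // placeholder
AzureKeyCredential credential = new AzureKeyCredential("your-api-key"); // placeholder
LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");

// Same payload as the anonymous object in the sample, expressed as raw JSON.
using RequestContent content = RequestContent.Create(BinaryData.FromString(
    "{\"name\":\"your_large_face_list_name\",\"userData\":\"your_user_data\",\"recognitionModel\":\"recognition_01\"}"));
Response response = await client.CreateAsync(content);

Console.WriteLine(response.Status);
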
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = await client.CreateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = client.Create(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.DeleteAsync(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.Delete(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.GetLargeFaceListAsync(); +]]> + + + +This sample shows how to call GetLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.GetLargeFaceList(); +]]> + + + +This sample shows how to call GetLargeFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.GetLargeFaceListAsync(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceList and parse the result. 
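
Fields such as userData and recognitionModel are not always present in the returned JSON (the sample passes true so that the recognition model is included), so TryGetProperty is the safer way to read them. A short sketch around the same protocol call as the generated sample below, with placeholder endpoint and key:

Uri endpoint = new Uri("https://your-resource-name.cognitiveservices.azure.com"); // placeholder
AzureKeyCredential credential = new AzureKeyCredential("your-api-key"); // placeholder
LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");

Response response = client.GetLargeFaceList(true, null);

using JsonDocument document = JsonDocument.Parse(response.ContentStream);
JsonElement result = document.RootElement;
Console.WriteLine(result.GetProperty("name").GetString());
// Probe optional properties instead of assuming they exist.
if (result.TryGetProperty("recognitionModel", out JsonElement recognitionModel))
{
    Console.WriteLine(recognitionModel.GetString());
}
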
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.GetLargeFaceList(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call UpdateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", +}); +Response response = await client.UpdateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Update. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", +}); +Response response = client.Update(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + +Response> response = await client.GetLargeFaceListsAsync(); +]]> + + + +This sample shows how to call GetLargeFaceLists. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + +Response> response = client.GetLargeFaceLists(); +]]> + + + +This sample shows how to call GetLargeFaceListsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + +Response response = await client.GetLargeFaceListsAsync("my_list_id", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceLists and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + +Response response = client.GetLargeFaceLists("my_list_id", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatusAsync. 
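
Training is a long-running operation, so instead of polling the training status by hand, the Train call can be started and then awaited through the returned Azure.Core Operation; TrainAsync with WaitUntil appears later in this file, and WaitForCompletionResponseAsync is the standard Azure.Core polling helper. A hedged sketch with placeholder endpoint and key:

Uri endpoint = new Uri("https://your-resource-name.cognitiveservices.azure.com"); // placeholder
AzureKeyCredential credential = new AzureKeyCredential("your-api-key"); // placeholder
LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");

// Start training without blocking, then poll until a terminal state is reached.
Operation operation = await client.TrainAsync(WaitUntil.Started);
Response final = await operation.WaitForCompletionResponseAsync();

Console.WriteLine(final.Status);
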
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.GetTrainingStatusAsync(); +]]> + + + +This sample shows how to call GetTrainingStatus. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.GetTrainingStatus(); +]]> + + + +This sample shows how to call GetTrainingStatusAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.GetTrainingStatusAsync(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatus and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.GetTrainingStatus(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call DeleteFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.DeleteFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.DeleteFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call GetFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call UpdateFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = await client.UpdateFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = client.UpdateFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFacesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response> response = await client.GetFacesAsync(); +]]> + + + +This sample shows how to call GetFaces. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response> response = client.GetFaces(); +]]> + + + +This sample shows how to call GetFacesAsync and parse the result. 
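
Assuming the Face list-paging convention that 'start' lists entries after the given ID and that an empty page marks the end (an assumption here; the REST reference is authoritative), the protocol method in the generated sample below can be looped to walk all faces:

Uri endpoint = new Uri("https://your-resource-name.cognitiveservices.azure.com"); // placeholder
AzureKeyCredential credential = new AzureKeyCredential("your-api-key"); // placeholder
LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");

string start = null; // assumed paging cursor; null requests the first page
while (true)
{
    Response page = await client.GetFacesAsync(start, 20, null);
    using JsonDocument document = JsonDocument.Parse(page.ContentStream);
    JsonElement faces = document.RootElement;
    if (faces.GetArrayLength() == 0)
    {
        break; // assumed terminal condition: an empty page means no more faces
    }
    foreach (JsonElement face in faces.EnumerateArray())
    {
        Console.WriteLine(face.GetProperty("persistedFaceId").GetString());
    }
    // The next page starts after the last ID seen on this page.
    start = faces[faces.GetArrayLength() - 1].GetProperty("persistedFaceId").GetString();
}
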
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = await client.GetFacesAsync("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call GetFaces and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Response response = client.GetFaces("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call TrainAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Operation operation = await client.TrainAsync(WaitUntil.Completed); +]]> + + + +This sample shows how to call Train. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + +Operation operation = client.Train(WaitUntil.Completed); +]]> + + + \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClientImpl.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClientImpl.xml new file mode 100644 index 000000000000..5d708b357ab4 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClientImpl.xml @@ -0,0 +1,645 @@ + + + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.CreateAsync("your_large_person_group_name"); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.Create("your_large_person_group_name"); +]]> + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = await client.CreateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Create. 
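
Service failures surface as Azure.RequestFailedException (for example, when the group ID is already in use), so the creation call in the generated sample below is typically guarded. A hedged sketch with placeholder endpoint and key:

Uri endpoint = new Uri("https://your-resource-name.cognitiveservices.azure.com"); // placeholder
AzureKeyCredential credential = new AzureKeyCredential("your-api-key"); // placeholder
LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");

try
{
    using RequestContent content = RequestContent.Create(new
    {
        name = "your_large_person_group_name",
    });
    Response response = client.Create(content);
    Console.WriteLine(response.Status);
}
catch (RequestFailedException ex)
{
    // Status is the HTTP status code; ErrorCode is the service-defined error string.
    Console.WriteLine($"{ex.Status}: {ex.ErrorCode}");
}
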
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = client.Create(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.DeleteAsync(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.Delete(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetLargePersonGroupAsync(); +]]> + + + +This sample shows how to call GetLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetLargePersonGroup(); +]]> + + + +This sample shows how to call GetLargePersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetLargePersonGroupAsync(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetLargePersonGroup(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call UpdateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", +}); +Response response = await client.UpdateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Update. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", +}); +Response response = client.Update(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null); + +Response> response = await client.GetLargePersonGroupsAsync(); +]]> + + + +This sample shows how to call GetLargePersonGroups. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null); + +Response> response = client.GetLargePersonGroups(); +]]> + + + +This sample shows how to call GetLargePersonGroupsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null); + +Response response = await client.GetLargePersonGroupsAsync("00000000-0000-0000-0000-000000000000", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroups and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null); + +Response response = client.GetLargePersonGroups("00000000-0000-0000-0000-000000000000", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatusAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetTrainingStatusAsync(); +]]> + + + +This sample shows how to call GetTrainingStatus. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetTrainingStatus(); +]]> + + + +This sample shows how to call GetTrainingStatusAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetTrainingStatusAsync(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatus and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetTrainingStatus(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call CreatePersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.CreatePersonAsync("your_large_person_group_person_name"); +]]> + + + +This sample shows how to call CreatePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.CreatePerson("your_large_person_group_person_name"); +]]> + + + +This sample shows how to call CreatePersonAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = await client.CreatePersonAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call CreatePerson and parse the result. 
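
The returned personId is a GUID, so it can be read once as a typed value and passed straight to the person-level calls (GetPerson, UpdatePerson, DeletePerson) shown later in this file. A short sketch around the same protocol call as the generated sample below, with placeholder endpoint and key:

Uri endpoint = new Uri("https://your-resource-name.cognitiveservices.azure.com"); // placeholder
AzureKeyCredential credential = new AzureKeyCredential("your-api-key"); // placeholder
LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");

using RequestContent content = RequestContent.Create(new
{
    name = "your_large_person_group_person_name",
});
Response response = client.CreatePerson(content);

using JsonDocument document = JsonDocument.Parse(response.ContentStream);
// Read the ID as a Guid rather than a display string.
Guid personId = document.RootElement.GetProperty("personId").GetGuid();
Console.WriteLine(personId);
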
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = client.CreatePerson(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call DeletePersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.DeletePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeletePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.DeletePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); +]]> + + + +This sample shows how to call GetPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); +]]> + + + +This sample shows how to call GetPersonAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> + + + +This sample shows how to call UpdatePersonAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = await client.UpdatePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdatePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = client.UpdatePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response> response = await client.GetPersonsAsync(); +]]> + + + +This sample shows how to call GetPersons. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response> response = client.GetPersons(); +]]> + + + +This sample shows how to call GetPersonsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetPersonsAsync("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetPersons and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetPersons("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call DeleteFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.DeleteFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.DeleteFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call GetFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call UpdateFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = await client.UpdateFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = client.UpdateFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call TrainAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Operation operation = await client.TrainAsync(WaitUntil.Completed); +]]> + + + +This sample shows how to call Train. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + +Operation operation = client.Train(WaitUntil.Completed); +]]> + + + \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs new file mode 100644 index 000000000000..23c0a1e61880 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs @@ -0,0 +1,123 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated client. + /// The FaceAdministration service client. + public partial class FaceAdministrationClient + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of FaceAdministrationClient for mocking. + protected FaceAdministrationClient() + { + } + + /// Initializes a new instance of FaceAdministrationClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. + public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions()) + { + } + + /// Initializes a new instance of FaceAdministrationClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. 
+        public FaceAdministrationClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions())
+        {
+        }
+
+        /// Initializes a new instance of FaceAdministrationClient.
+        ///
+        /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+        /// https://{resource-name}.cognitiveservices.azure.com).
+        ///
+        /// A credential used to authenticate to an Azure Service.
+        /// The options for configuring the client.
+        /// or is null.
+        public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential, AzureAIVisionFaceClientOptions options)
+        {
+            Argument.AssertNotNull(endpoint, nameof(endpoint));
+            Argument.AssertNotNull(credential, nameof(credential));
+            options ??= new AzureAIVisionFaceClientOptions();
+
+            ClientDiagnostics = new ClientDiagnostics(options, true);
+            _keyCredential = credential;
+            _pipeline = HttpPipelineBuilder.Build(options, Array.Empty<HttpPipelinePolicy>(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier());
+            _endpoint = endpoint;
+            _apiVersion = options.Version;
+        }
+
+        /// Initializes a new instance of FaceAdministrationClient.
+        ///
+        /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+        /// https://{resource-name}.cognitiveservices.azure.com).
+        ///
+        /// A credential used to authenticate to an Azure Service.
+        /// The options for configuring the client.
+        /// or is null.
+        public FaceAdministrationClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceClientOptions options)
+        {
+            Argument.AssertNotNull(endpoint, nameof(endpoint));
+            Argument.AssertNotNull(credential, nameof(credential));
+            options ??= new AzureAIVisionFaceClientOptions();
+
+            ClientDiagnostics = new ClientDiagnostics(options, true);
+            _tokenCredential = credential;
+            _pipeline = HttpPipelineBuilder.Build(options, Array.Empty<HttpPipelinePolicy>(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier());
+            _endpoint = endpoint;
+            _apiVersion = options.Version;
+        }
+
+        /// Initializes a new instance of LargeFaceListClientImpl.
+        /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.
+        /// is null.
+        /// is an empty string, and was expected to be non-empty.
+        public virtual LargeFaceListClientImpl GetLargeFaceListClientImplClient(string largeFaceListId)
+        {
+            Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
+
+            return new LargeFaceListClientImpl(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, largeFaceListId, _apiVersion);
+        }
+
+        /// Initializes a new instance of LargePersonGroupClientImpl.
+        /// ID of the container.
+        /// is null.
+        /// is an empty string, and was expected to be non-empty.
+ public virtual LargePersonGroupClientImpl GetLargePersonGroupClientImplClient(string largePersonGroupId) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + return new LargePersonGroupClientImpl(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, largePersonGroupId, _apiVersion); + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs index 1a0b973e9201..7cb8c3849ea7 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs @@ -105,7 +105,7 @@ public FaceClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceCli /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. /// URL of input image. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -114,22 +114,7 @@ public FaceClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceCli /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. 
- /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. internal virtual async Task>> DetectFromUrlImplAsync(Uri uri, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(uri, nameof(uri)); @@ -150,7 +135,7 @@ internal virtual async Task>> Detect /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. /// URL of input image. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. 
@@ -159,22 +144,7 @@ internal virtual async Task>> Detect /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. internal virtual Response> DetectFromUrlImpl(Uri uri, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? 
faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(uri, nameof(uri)); @@ -209,7 +179,7 @@ internal virtual Response> DetectFromUrlImpl( /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -254,7 +224,7 @@ internal virtual async Task DetectFromUrlImplAsync(RequestContent cont /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -285,7 +255,7 @@ internal virtual Response DetectFromUrlImpl(RequestContent content, string detec /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. /// The input image binary. 
- /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -294,22 +264,7 @@ internal virtual Response DetectFromUrlImpl(RequestContent content, string detec /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. 
- /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details. internal virtual async Task>> DetectImplAsync(BinaryData imageContent, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(imageContent, nameof(imageContent)); @@ -330,7 +285,7 @@ internal virtual async Task>> Detect /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. /// The input image binary. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -339,22 +294,7 @@ internal virtual async Task>> Detect /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. 
The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details. internal virtual Response> DetectImpl(BinaryData imageContent, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(imageContent, nameof(imageContent)); @@ -389,7 +329,7 @@ internal virtual Response> DetectImpl(BinaryD /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". 
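// ---------------------------------------------------------------------------
// A minimal sketch of the model-selection guidance above. The diff only shows
// the internal DetectImpl/DetectFromUrlImpl methods, so the public DetectAsync
// overload, the FaceDetectionResult model name, and the enum member names below
// are assumptions based on the documented values.
using System;
using System.Collections.Generic;
using System.IO;
using Azure;
using Azure.AI.Vision.Face;

var client = new FaceClient(
    new Uri("https://{resource-name}.cognitiveservices.azure.com"),
    new AzureKeyCredential("<api-key>"));

// detection_03 is recommended for small (64x64 pixels) and rotated faces;
// recognition_04 is recommended for overall accuracy, including masked faces.
BinaryData image = BinaryData.FromBytes(File.ReadAllBytes("photo.jpg"));
Response<IReadOnlyList<FaceDetectionResult>> detected = await client.DetectAsync(
    image,
    FaceDetectionModel.Detection03,
    FaceRecognitionModel.Recognition04,
    returnFaceId: true);
Guid queryFaceId = detected.Value[0].FaceId.Value; // cached for faceIdTimeToLive seconds
// ---------------------------------------------------------------------------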
/// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -434,7 +374,7 @@ internal virtual async Task DetectImplAsync(RequestContent content, st /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -470,13 +410,7 @@ internal virtual Response DetectImpl(RequestContent content, string detectionMod /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. /// The cancellation token to use. /// is null. - /// - /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. - /// - /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. - /// - /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array. 
- /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar for more details. /// public virtual async Task>> FindSimilarAsync(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) { @@ -503,13 +437,7 @@ public virtual async Task>> FindSi /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. /// The cancellation token to use. /// is null. - /// - /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. - /// - /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. - /// - /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar for more details. /// public virtual Response> FindSimilar(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) { @@ -611,14 +539,7 @@ public virtual Response FindSimilar(RequestContent content, RequestContext conte /// The faceId of one face, come from "Detect". /// The faceId of another face, come from "Detect". /// The cancellation token to use. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. - /// > * For the scenarios that are sensitive to accuracy please make your own judgment. - /// > * The 'recognitionModel' associated with the both faces should be the same. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-face-to-face for more details. /// public virtual async Task> VerifyFaceToFaceAsync(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default) { @@ -632,14 +553,7 @@ public virtual async Task> VerifyFaceToFaceAsyn /// The faceId of one face, come from "Detect". /// The faceId of another face, come from "Detect". /// The cancellation token to use. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. - /// > * For the scenarios that are sensitive to accuracy please make your own judgment. - /// > * The 'recognitionModel' associated with the both faces should be the same. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-face-to-face for more details. 
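// ---------------------------------------------------------------------------
// A sketch of the FindSimilar and VerifyFaceToFace calls documented above,
// reusing `client` and `queryFaceId` from the earlier detect sketch.
// `candidateFaceIds` stands in for other detected faces, and the
// FaceVerificationResult members (IsIdentical, Confidence) mirror the REST
// payload; both are assumptions here.
var candidateFaceIds = new List<Guid> { /* faceIds from earlier Detect calls */ };

// matchFace ignores the same-person threshold and always returns ranked
// results; matchPerson (the default) may return an empty list instead.
Response<IReadOnlyList<FaceFindSimilarResult>> similar = await client.FindSimilarAsync(
    queryFaceId,
    candidateFaceIds,
    maxNumOfCandidatesReturned: 5,
    mode: FindSimilarMatchMode.MatchFace);

// 1-to-1 verification; both faceIds must share the same recognitionModel.
Response<FaceVerificationResult> verification =
    await client.VerifyFaceToFaceAsync(queryFaceId, candidateFaceIds[0]);
Console.WriteLine($"identical={verification.Value.IsIdentical} confidence={verification.Value.Confidence}");
// ---------------------------------------------------------------------------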
/// public virtual Response VerifyFaceToFace(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default) { @@ -731,14 +645,7 @@ public virtual Response VerifyFaceToFace(RequestContent content, RequestContext /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces. /// The cancellation token to use. /// is null. - /// - /// > - /// * - /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result. - /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts. - /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces. - /// * The 'recognitionModel' associated with the query faces' faceIds should be the same. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/group for more details. /// public virtual async Task> GroupAsync(IEnumerable faceIds, CancellationToken cancellationToken = default) { @@ -754,14 +661,7 @@ public virtual async Task> GroupAsync(IEnumerable Array of candidate faceIds created by "Detect". The maximum is 1000 faces. /// The cancellation token to use. /// is null. - /// - /// > - /// * - /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result. - /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts. - /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces. - /// * The 'recognitionModel' associated with the query faces' faceIds should be the same. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/group for more details. /// public virtual Response Group(IEnumerable faceIds, CancellationToken cancellationToken = default) { @@ -851,6 +751,386 @@ public virtual Response Group(RequestContent content, RequestContext context = n } } + /// Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// The cancellation token to use. + /// is null. 
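// ---------------------------------------------------------------------------
// A sketch of the Group call above; the FaceGroupingResult shape (Groups,
// MessyGroup) follows the REST response and is an assumption here. Group needs
// between 2 and 1000 candidate faceIds, all from the same recognitionModel.
Response<FaceGroupingResult> grouping = await client.GroupAsync(candidateFaceIds);
foreach (var sameLookingGroup in grouping.Value.Groups)
    Console.WriteLine($"group of {sameLookingGroup.Count} similar faces");
// Faces with no similar counterpart land in grouping.Value.MessyGroup.
// ---------------------------------------------------------------------------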
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar-from-large-face-list for more details. + /// + public virtual async Task>> FindSimilarFromLargeFaceListAsync(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId)); + + FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await FindSimilarFromLargeFaceListAsync(findSimilarFromLargeFaceListRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar-from-large-face-list for more details. + /// + public virtual Response> FindSimilarFromLargeFaceList(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId)); + + FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = FindSimilarFromLargeFaceList(findSimilarFromLargeFaceListRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
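// ---------------------------------------------------------------------------
// A sketch of the convenience overload above. Matches come from a Large Face
// List, so they carry persistedFaceIds (which never expire) rather than
// transient faceIds; the FaceFindSimilarResult member names and the list id
// are assumptions here.
Response<IReadOnlyList<FaceFindSimilarResult>> matches =
    await client.FindSimilarFromLargeFaceListAsync(
        queryFaceId,
        "my-large-face-list",                // hypothetical largeFaceListId
        maxNumOfCandidatesReturned: 10,
        mode: FindSimilarMatchMode.MatchPerson);
foreach (FaceFindSimilarResult match in matches.Value)
    Console.WriteLine($"{match.PersistedFaceId}: {match.Confidence:F2}");
// ---------------------------------------------------------------------------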
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task FindSimilarFromLargeFaceListAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response FindSimilarFromLargeFaceList(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/identify-from-person-group for more details. + /// + public virtual async Task>> IdentifyFromLargePersonGroupAsync(IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? 
confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await IdentifyFromLargePersonGroupAsync(identifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/identify-from-person-group for more details. + /// + public virtual Response> IdentifyFromLargePersonGroup(IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = IdentifyFromLargePersonGroup(identifyFromLargePersonGroupRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
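// ---------------------------------------------------------------------------
// A sketch of the convenience overloads above. FaceId, Candidates, PersonId and
// Confidence are the members the new models in this diff define; the group id
// is hypothetical and must reference a trained Large Person Group.
Response<IReadOnlyList<FaceIdentificationResult>> identified =
    await client.IdentifyFromLargePersonGroupAsync(
        new[] { queryFaceId },
        "my-large-person-group",
        maxNumOfCandidatesReturned: 1,
        confidenceThreshold: 0.7f);
foreach (FaceIdentificationResult result in identified.Value)
{
    // Candidates is empty when no enrolled person passes the threshold.
    foreach (FaceIdentificationCandidate candidate in result.Candidates)
        Console.WriteLine($"{result.FaceId} -> {candidate.PersonId} ({candidate.Confidence:F2})");
}
// ---------------------------------------------------------------------------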
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task IdentifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response IdentifyFromLargePersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Verify whether a face belongs to a person in a Large Person Group. + /// The faceId of the face, come from "Detect". + /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-from-large-person-group for more details. + /// + public virtual async Task> VerifyFromLargePersonGroupAsync(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await VerifyFromLargePersonGroupAsync(verifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// Verify whether a face belongs to a person in a Large Person Group. + /// The faceId of the face, come from "Detect". 
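// ---------------------------------------------------------------------------
// For the protocol overloads above, the caller builds the JSON body and parses
// the raw response; RequestContent.Create with an anonymous object is the
// usual Azure.Core pattern. A sketch, assuming the same hypothetical group id
// (requires using Azure.Core; and using System.Text.Json;).
RequestContent body = RequestContent.Create(new
{
    faceIds = new[] { queryFaceId },
    largePersonGroupId = "my-large-person-group",
    maxNumOfCandidatesReturned = 1,
});
Response raw = await client.IdentifyFromLargePersonGroupAsync(body, context: null);
using JsonDocument doc = JsonDocument.Parse(raw.Content);
foreach (JsonElement item in doc.RootElement.EnumerateArray())
    Console.WriteLine(item.GetProperty("faceId").GetGuid());
// ---------------------------------------------------------------------------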
+ /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-from-large-person-group for more details. + /// + public virtual Response VerifyFromLargePersonGroup(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = VerifyFromLargePersonGroup(verifyFromLargePersonGroupRequest.ToRequestContent(), context); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task VerifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response VerifyFromLargePersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + internal HttpMessage CreateDetectFromUrlImplRequest(RequestContent content, string detectionModel, string recognitionModel, bool? 
returnFaceId, IEnumerable returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? faceIdTimeToLive, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); @@ -992,6 +1272,57 @@ internal HttpMessage CreateGroupRequest(RequestContent content, RequestContext c return message; } + internal HttpMessage CreateFindSimilarFromLargeFaceListRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/findsimilars", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateIdentifyFromLargePersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/identify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateVerifyFromLargePersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/verify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + private static RequestContext DefaultRequestContext = new RequestContext(); internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) { diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs new file mode 100644 index 000000000000..3d0dcf5633df --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceIdentificationCandidate : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceIdentificationCandidate IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceIdentificationCandidate(document.RootElement, options); + } + + internal static FaceIdentificationCandidate DeserializeFaceIdentificationCandidate(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + float confidence = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("confidence"u8)) + { + confidence = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{options.Format}' format."); + } + } + + FaceIdentificationCandidate IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceIdentificationCandidate(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
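// ---------------------------------------------------------------------------
// Because the model implements IJsonModel/IPersistableModel, it round-trips
// through System.ClientModel's ModelReaderWriter, which is convenient in tests
// (requires using System.ClientModel.Primitives; the JSON values are sample
// data, not taken from any real response).
string json = "{\"personId\":\"25985303-c537-4467-b41d-bdb45cd95ca1\",\"confidence\":0.92}";
FaceIdentificationCandidate candidate =
    ModelReaderWriter.Read<FaceIdentificationCandidate>(BinaryData.FromString(json));
BinaryData roundTripped = ModelReaderWriter.Write(candidate); // same wire shape
// ---------------------------------------------------------------------------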
+ internal static FaceIdentificationCandidate FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceIdentificationCandidate(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs new file mode 100644 index 000000000000..b4fcaba1400b --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Candidate for identify call. + public partial class FaceIdentificationCandidate + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// personId of candidate person. + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + internal FaceIdentificationCandidate(Guid personId, float confidence) + { + PersonId = personId; + Confidence = confidence; + } + + /// Initializes a new instance of . + /// personId of candidate person. + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + /// Keeps track of any properties unknown to the library. + internal FaceIdentificationCandidate(Guid personId, float confidence, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + Confidence = confidence; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceIdentificationCandidate() + { + } + + /// personId of candidate person. + public Guid PersonId { get; } + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + public float Confidence { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs new file mode 100644 index 000000000000..edee38653822 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceIdentificationResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("candidates"u8); + writer.WriteStartArray(); + foreach (var item in Candidates) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceIdentificationResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceIdentificationResult(document.RootElement, options); + } + + internal static FaceIdentificationResult DeserializeFaceIdentificationResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + IReadOnlyList candidates = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("candidates"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(FaceIdentificationCandidate.DeserializeFaceIdentificationCandidate(item, options)); + } + candidates = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceIdentificationResult(faceId, candidates, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{options.Format}' format."); + } + } + + FaceIdentificationResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceIdentificationResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceIdentificationResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceIdentificationResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs new file mode 100644 index 000000000000..1489f5061145 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Identify result. + public partial class FaceIdentificationResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// is null. + internal FaceIdentificationResult(Guid faceId, IEnumerable candidates) + { + Argument.AssertNotNull(candidates, nameof(candidates)); + + FaceId = faceId; + Candidates = candidates.ToList(); + } + + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). 
Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// Keeps track of any properties unknown to the library. + internal FaceIdentificationResult(Guid faceId, IReadOnlyList candidates, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + Candidates = candidates; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceIdentificationResult() + { + } + + /// faceId of the query face. + public Guid FaceId { get; } + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + public IReadOnlyList Candidates { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs new file mode 100644 index 000000000000..67683c753b30 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The status of long running operation. + public readonly partial struct FaceOperationStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceOperationStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NotStartedValue = "notStarted"; + private const string RunningValue = "running"; + private const string SucceededValue = "succeeded"; + private const string FailedValue = "failed"; + + /// The operation is not started. + public static FaceOperationStatus NotStarted { get; } = new FaceOperationStatus(NotStartedValue); + /// The operation is still running. + public static FaceOperationStatus Running { get; } = new FaceOperationStatus(RunningValue); + /// The operation is succeeded. + public static FaceOperationStatus Succeeded { get; } = new FaceOperationStatus(SucceededValue); + /// The operation is failed. + public static FaceOperationStatus Failed { get; } = new FaceOperationStatus(FailedValue); + /// Determines if two values are the same. + public static bool operator ==(FaceOperationStatus left, FaceOperationStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceOperationStatus left, FaceOperationStatus right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator FaceOperationStatus(string value) => new FaceOperationStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceOperationStatus other && Equals(other); + /// + public bool Equals(FaceOperationStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs index 4019a5bc3493..f3f4c70329ee 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs @@ -106,18 +106,7 @@ public FaceSessionClient(Uri endpoint, TokenCredential credential, AzureAIVision /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLiveness/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-session for more details. /// public virtual async Task> CreateLivenessSessionAsync(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) { @@ -133,18 +122,7 @@ public virtual async Task> CreateLivenessS /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLiveness/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-session for more details. /// public virtual Response CreateLivenessSession(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) { @@ -306,7 +284,7 @@ public virtual Response DeleteLivenessSession(string sessionId, RequestContext c } } - /// Get session result of detectLiveness/singleModal call. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -321,7 +299,7 @@ public virtual async Task> GetLivenessSessionResultAsy return Response.FromValue(LivenessSession.FromResponse(response), response); } - /// Get session result of detectLiveness/singleModal call. 
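
FaceOperationStatus above is an "extensible enum": strings convert implicitly, equality is case-insensitive, and values the library does not know about are preserved rather than rejected. A minimal usage sketch, assuming only the struct shown here:

    // Implicit conversion from the wire string; comparison is case-insensitive.
    FaceOperationStatus status = "succeeded";
    if (status == FaceOperationStatus.Succeeded)
    {
        Console.WriteLine("Training finished.");
    }
    else if (status == FaceOperationStatus.Failed)
    {
        Console.WriteLine("Training failed.");
    }
    else
    {
        // A status added by a future service version still round-trips.
        Console.WriteLine($"Still in progress: {status}");
    }
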
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -337,7 +315,7 @@ public virtual Response GetLivenessSessionResult(string session } /// - /// [Protocol Method] Get session result of detectLiveness/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// /// /// @@ -377,7 +355,7 @@ public virtual async Task GetLivenessSessionResultAsync(string session } /// - /// [Protocol Method] Get session result of detectLiveness/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// /// /// @@ -420,11 +398,7 @@ public virtual Response GetLivenessSessionResult(string sessionId, RequestContex /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the 'start'. - /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-sessions for more details. /// public virtual async Task>> GetLivenessSessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -445,11 +419,7 @@ public virtual async Task>> GetLiven /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the 'start'. - /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-sessions for more details. /// public virtual Response> GetLivenessSessions(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -540,7 +510,7 @@ public virtual Response GetLivenessSessions(string start, int? top, RequestConte } } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -565,7 +535,7 @@ public virtual async Task>> Ge return Response.FromValue(value, response); } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -591,7 +561,7 @@ public virtual Response> GetLivenessSes } /// - /// [Protocol Method] Gets session requests and response body for the session. 
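
The IJsonModel/IPersistableModel plumbing on FaceIdentificationResult shown earlier is what lets System.ClientModel round-trip the model without a service call ("J", JSON, is the only format these models support, per GetFormatFromOptions). A short sketch, assuming a result instance obtained from a real or mocked identify call:

    using System.ClientModel.Primitives;

    // Serialize the model to JSON, then rehydrate it; Read routes through
    // DeserializeFaceIdentificationResult shown above.
    BinaryData json = ModelReaderWriter.Write(result);
    FaceIdentificationResult roundTripped = ModelReaderWriter.Read<FaceIdentificationResult>(json);
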
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// /// /// @@ -633,7 +603,7 @@ public virtual async Task GetLivenessSessionAuditEntriesAsync(string s } /// - /// [Protocol Method] Gets session requests and response body for the session. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// /// /// @@ -678,27 +648,8 @@ public virtual Response GetLivenessSessionAuditEntries(string sessionId, string /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. - /// > [!NOTE] - /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage. - /// - internal virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session for more details. + internal virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -712,27 +663,8 @@ internal virtual async Task> Cre /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. - /// > [!NOTE] - /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage. 
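
The CreateLivenessSession convenience overloads above take a strongly typed body and return a short-lived session for the client device. A hedged sketch; CreateLivenessSessionContent, LivenessOperationMode, and the AuthToken property are assumed from the public package surface and may differ by version:

    var client = new FaceSessionClient(
        new Uri("https://my-resource.cognitiveservices.azure.com"),
        new AzureKeyCredential("<key>"));

    // Assumed shape of the request body; see the package docs for the exact type.
    var content = new CreateLivenessSessionContent(LivenessOperationMode.Passive)
    {
        DeviceCorrelationId = Guid.NewGuid().ToString(),
    };

    var session = await client.CreateLivenessSessionAsync(content);
    Console.WriteLine($"Token for the client device: {session.Value.AuthToken}");
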
- /// - internal virtual Response CreateLivenessWithVerifySession(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session for more details. + internal virtual Response CreateLivenessWithVerifySession(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -752,7 +684,7 @@ internal virtual Response CreateLivenessW /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -790,7 +722,7 @@ internal virtual async Task CreateLivenessWithVerifySessionAsync(Reque /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -822,25 +754,8 @@ internal virtual Response CreateLivenessWithVerifySession(RequestContent content /// Request content of liveness with verify session creation. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Recommended Option: VerifyImage is provided during session creation. - /// - internal virtual async Task> CreateLivenessWithVerifySessionWithVerifyImageAsync(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details. + internal virtual async Task> CreateLivenessWithVerifySessionWithVerifyImageAsync(CreateLivenessWithVerifySessionMultipartContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -854,25 +769,8 @@ internal virtual async Task> Cre /// Request content of liveness with verify session creation. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. 
- /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Recommended Option: VerifyImage is provided during session creation. - /// - internal virtual Response CreateLivenessWithVerifySessionWithVerifyImage(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details. + internal virtual Response CreateLivenessWithVerifySessionWithVerifyImage(CreateLivenessWithVerifySessionMultipartContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -892,7 +790,7 @@ internal virtual Response CreateLivenessW /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -931,7 +829,7 @@ internal virtual async Task CreateLivenessWithVerifySessionWithVerifyI /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -1032,7 +930,7 @@ public virtual Response DeleteLivenessWithVerifySession(string sessionId, Reques } } - /// Get session result of detectLivenessWithVerify/singleModal call. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -1047,7 +945,7 @@ public virtual async Task> GetLivenessWithVe return Response.FromValue(LivenessWithVerifySession.FromResponse(response), response); } - /// Get session result of detectLivenessWithVerify/singleModal call. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -1063,7 +961,7 @@ public virtual Response GetLivenessWithVerifySessionR } /// - /// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// /// /// @@ -1103,7 +1001,7 @@ public virtual async Task GetLivenessWithVerifySessionResultAsync(stri } /// - /// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// /// /// @@ -1146,11 +1044,7 @@ public virtual Response GetLivenessWithVerifySessionResult(string sessionId, Req /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the "start". 
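
The session listing operations page by cursor: "start" is the last sessionId already seen, and results come back ordered by sessionId in ascending order. A sketch of draining all pages via the GetLivenessSessionsAsync overload shown above; the element type and its Id property are assumed from the package surface:

    string start = null;
    while (true)
    {
        var page = await client.GetLivenessSessionsAsync(start: start, top: 64);
        foreach (var session in page.Value)
        {
            Console.WriteLine(session.Id);
        }
        if (page.Value.Count < 64)
        {
            break; // short page: nothing left to list
        }
        start = page.Value[page.Value.Count - 1].Id; // cursor for the next page
    }
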
- /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-sessions for more details. /// public virtual async Task>> GetLivenessWithVerifySessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -1171,11 +1065,7 @@ public virtual async Task>> GetLiven /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the "start". - /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-sessions for more details. /// public virtual Response> GetLivenessWithVerifySessions(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -1266,7 +1156,7 @@ public virtual Response GetLivenessWithVerifySessions(string start, int? top, Re } } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -1291,7 +1181,7 @@ public virtual async Task>> Ge return Response.FromValue(value, response); } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -1317,7 +1207,7 @@ public virtual Response> GetLivenessWit } /// - /// [Protocol Method] Gets session requests and response body for the session. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// /// /// @@ -1359,7 +1249,7 @@ public virtual async Task GetLivenessWithVerifySessionAuditEntriesAsyn } /// - /// [Protocol Method] Gets session requests and response body for the session. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// /// /// @@ -1400,6 +1290,270 @@ public virtual Response GetLivenessWithVerifySessionAuditEntries(string sessionI } } + /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// Id of session image. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. + /// The 'recognitionModel' associated with the detected faceIds. 
Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-session-image-id for more details. + /// + public virtual async Task>> DetectFromSessionImageAsync(string sessionImageId, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(sessionImageId, nameof(sessionImageId)); + + DetectFromSessionImageRequest detectFromSessionImageRequest = new DetectFromSessionImageRequest(sessionImageId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await DetectFromSessionImageAsync(detectFromSessionImageRequest.ToRequestContent(), detectionModel?.ToString(), recognitionModel?.ToString(), returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceDetectionResult.DeserializeFaceDetectionResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// Id of session image. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Return faceIds of the detected faces or not. The default value is true. 
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-session-image-id for more details. + /// + public virtual Response> DetectFromSessionImage(string sessionImageId, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(sessionImageId, nameof(sessionImageId)); + + DetectFromSessionImageRequest detectFromSessionImageRequest = new DetectFromSessionImageRequest(sessionImageId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = DetectFromSessionImage(detectFromSessionImageRequest.ToRequestContent(), detectionModel?.ToString(), recognitionModel?.ToString(), returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceDetectionResult.DeserializeFaceDetectionResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. 
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DetectFromSessionImageAsync(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DetectFromSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectFromSessionImageRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. 
+ /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DetectFromSessionImage(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DetectFromSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectFromSessionImageRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// The request ID of the image to be retrieved. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetSessionImageAsync(string sessionImageId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetSessionImageAsync(sessionImageId, context).ConfigureAwait(false); + return Response.FromValue(response.Content, response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// The request ID of the image to be retrieved. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetSessionImage(string sessionImageId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetSessionImage(sessionImageId, context); + return Response.FromValue(response.Content, response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request ID of the image to be retrieved. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
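
The new DetectFromSessionImage and GetSessionImage members let callers re-run detection on an image captured during a liveness session and download its bytes. A sketch using the convenience overloads above; sessionImageId is assumed to come from a prior session result, and the Detection03/Recognition04 property names are assumed from the package's extensible enums:

    var faces = await client.DetectFromSessionImageAsync(
        sessionImageId,
        detectionModel: FaceDetectionModel.Detection03,
        recognitionModel: FaceRecognitionModel.Recognition04,
        returnFaceId: false);
    Console.WriteLine($"Detected {faces.Value.Count} face(s).");

    // GetSessionImageAsync hands back the raw image bytes as BinaryData.
    var image = await client.GetSessionImageAsync(sessionImageId);
    await File.WriteAllBytesAsync("session-image.jpg", image.Value.ToArray());
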
+ /// + public virtual async Task GetSessionImageAsync(string sessionImageId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSessionImageRequest(sessionImageId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request ID of the image to be retrieved. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetSessionImage(string sessionImageId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSessionImageRequest(sessionImageId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + internal HttpMessage CreateCreateLivenessSessionRequest(RequestContent content, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); @@ -1611,6 +1765,67 @@ internal HttpMessage CreateGetLivenessWithVerifySessionAuditEntriesRequest(strin return message; } + internal HttpMessage CreateDetectFromSessionImageRequest(RequestContent content, string detectionModel, string recognitionModel, bool? returnFaceId, IEnumerable returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? 
faceIdTimeToLive, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detect", false); + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (recognitionModel != null) + { + uri.AppendQuery("recognitionModel", recognitionModel, true); + } + if (returnFaceId != null) + { + uri.AppendQuery("returnFaceId", returnFaceId.Value, true); + } + if (returnFaceAttributes != null && !(returnFaceAttributes is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("returnFaceAttributes", returnFaceAttributes, ",", true); + } + if (returnFaceLandmarks != null) + { + uri.AppendQuery("returnFaceLandmarks", returnFaceLandmarks.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + if (faceIdTimeToLive != null) + { + uri.AppendQuery("faceIdTimeToLive", faceIdTimeToLive.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetSessionImageRequest(string sessionImageId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/session/sessionImages/", false); + uri.AppendPath(sessionImageId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/octet-stream"); + return message; + } + private static RequestContext DefaultRequestContext = new RequestContext(); internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) { diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs new file mode 100644 index 000000000000..3baf11bb5f72 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceTrainingResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + writer.WritePropertyName("createdDateTime"u8); + writer.WriteStringValue(CreatedDateTime, "O"); + writer.WritePropertyName("lastActionDateTime"u8); + writer.WriteStringValue(LastActionDateTime, "O"); + writer.WritePropertyName("lastSuccessfulTrainingDateTime"u8); + writer.WriteStringValue(LastSuccessfulTrainingDateTime, "O"); + if (Optional.IsDefined(Message)) + { + writer.WritePropertyName("message"u8); + writer.WriteStringValue(Message); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceTrainingResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceTrainingResult(document.RootElement, options); + } + + internal static FaceTrainingResult DeserializeFaceTrainingResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + FaceOperationStatus status = default; + DateTimeOffset createdDateTime = default; + DateTimeOffset lastActionDateTime = default; + DateTimeOffset lastSuccessfulTrainingDateTime = default; + string message = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("status"u8)) + { + status = new FaceOperationStatus(property.Value.GetString()); + continue; + } + if (property.NameEquals("createdDateTime"u8)) + { + createdDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("lastActionDateTime"u8)) + { + lastActionDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("lastSuccessfulTrainingDateTime"u8)) + { + lastSuccessfulTrainingDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("message"u8)) + { + message = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceTrainingResult( + status, + createdDateTime, + lastActionDateTime, + lastSuccessfulTrainingDateTime, + message, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support writing '{options.Format}' format."); + } + } + + FaceTrainingResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceTrainingResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceTrainingResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceTrainingResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs new file mode 100644 index 000000000000..79ac7a85a12b --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Training result of a container. + public partial class FaceTrainingResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Training status of the container. + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. 
+ internal FaceTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime)
+ {
+ Status = status;
+ CreatedDateTime = createdDateTime;
+ LastActionDateTime = lastActionDateTime;
+ LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime;
+ }
+
+ /// Initializes a new instance of .
+ /// Training status of the container.
+ /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list.
+ /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list; may be null when the group has not been successfully trained.
+ /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list.
+ /// Failure message when training failed (omitted when training succeeded).
+ /// Keeps track of any properties unknown to the library.
+ internal FaceTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime, string message, IDictionary serializedAdditionalRawData)
+ {
+ Status = status;
+ CreatedDateTime = createdDateTime;
+ LastActionDateTime = lastActionDateTime;
+ LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime;
+ Message = message;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of for deserialization.
+ internal FaceTrainingResult()
+ {
+ }
+
+ /// Training status of the container.
+ public FaceOperationStatus Status { get; }
+ /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list.
+ public DateTimeOffset CreatedDateTime { get; }
+ /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list; may be null when the group has not been successfully trained.
+ public DateTimeOffset LastActionDateTime { get; }
+ /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list.
+ public DateTimeOffset LastSuccessfulTrainingDateTime { get; }
+ /// Failure message when training failed (omitted when training succeeded).
+ public string Message { get; }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs
new file mode 100644
index 000000000000..dedc938a5798
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs
@@ -0,0 +1,173 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
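
FaceTrainingResult above pairs with FaceOperationStatus for polling container training. A sketch, assuming a hypothetical GetTrainingStatusAsync accessor on the administration client; the real method name and client shape may differ:

    FaceTrainingResult training;
    do
    {
        await Task.Delay(TimeSpan.FromSeconds(1));
        training = await admin.GetTrainingStatusAsync(largePersonGroupId); // hypothetical accessor
    }
    while (training.Status == FaceOperationStatus.NotStarted || training.Status == FaceOperationStatus.Running);

    if (training.Status == FaceOperationStatus.Failed)
    {
        throw new InvalidOperationException($"Training failed: {training.Message}");
    }
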
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class FindSimilarFromLargeFaceListRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(Mode)) + { + writer.WritePropertyName("mode"u8); + writer.WriteStringValue(Mode.Value.ToString()); + } + writer.WritePropertyName("largeFaceListId"u8); + writer.WriteStringValue(LargeFaceListId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FindSimilarFromLargeFaceListRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options); + } + + internal static FindSimilarFromLargeFaceListRequest DeserializeFindSimilarFromLargeFaceListRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + int? maxNumOfCandidatesReturned = default; + FindSimilarMatchMode? 
mode = default; + string largeFaceListId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("mode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + mode = new FindSimilarMatchMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("largeFaceListId"u8)) + { + largeFaceListId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{options.Format}' format."); + } + } + + FindSimilarFromLargeFaceListRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FindSimilarFromLargeFaceListRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs new file mode 100644 index 000000000000..8603dc4b91cf --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The FindSimilarFromLargeFaceListRequest. 
+ internal partial class FindSimilarFromLargeFaceListRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// is null. + internal FindSimilarFromLargeFaceListRequest(Guid faceId, string largeFaceListId) + { + Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId)); + + FaceId = faceId; + LargeFaceListId = largeFaceListId; + } + + /// Initializes a new instance of . + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// Keeps track of any properties unknown to the library. + internal FindSimilarFromLargeFaceListRequest(Guid faceId, int? maxNumOfCandidatesReturned, FindSimilarMatchMode? mode, string largeFaceListId, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + Mode = mode; + LargeFaceListId = largeFaceListId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FindSimilarFromLargeFaceListRequest() + { + } + + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + public Guid FaceId { get; } + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + public int? MaxNumOfCandidatesReturned { get; } + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + public FindSimilarMatchMode? Mode { get; } + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. 
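
FindSimilarFromLargeFaceListRequest is internal, but its serializer above fixes the wire shape of the find-similar body. A sketch of posting the equivalent JSON through a protocol-method overload; the FindSimilarFromLargeFaceListAsync name on the face client is an assumption:

    // Mirrors the Write method above: faceId and largeFaceListId are required;
    // maxNumOfCandidatesReturned and mode are optional.
    var body = RequestContent.Create(new
    {
        faceId = faceId, // Guid from a prior Detect call
        maxNumOfCandidatesReturned = 20,
        mode = "matchPerson",
        largeFaceListId = "my-large-face-list",
    });

    Response response = await faceClient.FindSimilarFromLargeFaceListAsync(body); // assumed protocol method
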
+ public string LargeFaceListId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs new file mode 100644 index 000000000000..44d5b2573408 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class IdentifyFromLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceIds"u8); + writer.WriteStartArray(); + foreach (var item in FaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(ConfidenceThreshold)) + { + writer.WritePropertyName("confidenceThreshold"u8); + writer.WriteNumberValue(ConfidenceThreshold.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + IdentifyFromLargePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options); + } + + internal static IdentifyFromLargePersonGroupRequest DeserializeIdentifyFromLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList faceIds = default; + string largePersonGroupId = default; + int? maxNumOfCandidatesReturned = default; + float? 
confidenceThreshold = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + faceIds = array; + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("confidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidenceThreshold = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IdentifyFromLargePersonGroupRequest(faceIds, largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + IdentifyFromLargePersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IdentifyFromLargePersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs new file mode 100644 index 000000000000..f8ae4acdf23c --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The IdentifyFromLargePersonGroupRequest. + internal partial class IdentifyFromLargePersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces is identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// or is null. + internal IdentifyFromLargePersonGroupRequest(IEnumerable faceIds, string largePersonGroupId) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + FaceIds = faceIds.ToList(); + LargePersonGroupId = largePersonGroupId; + } + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces is identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// Keeps track of any properties unknown to the library. + internal IdentifyFromLargePersonGroupRequest(IReadOnlyList faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned, float? confidenceThreshold, IDictionary serializedAdditionalRawData) + { + FaceIds = faceIds; + LargePersonGroupId = largePersonGroupId; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + ConfidenceThreshold = confidenceThreshold; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IdentifyFromLargePersonGroupRequest() + { + } + + /// Array of query faces faceIds, created by the "Detect". Each of the faces is identified independently. The valid number of faceIds is between [1, 10]. + public IReadOnlyList FaceIds { get; } + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time.
+ public string LargePersonGroupId { get; } + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + public int? MaxNumOfCandidatesReturned { get; } + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + public float? ConfidenceThreshold { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs new file mode 100644 index 000000000000..d52d68e0bc12 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargeFaceList : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceList)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("largeFaceListId"u8); + writer.WriteStringValue(LargeFaceListId); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargeFaceList IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceList)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargeFaceList(document.RootElement, options); + } + + internal static LargeFaceList DeserializeLargeFaceList(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? 
recognitionModel = default; + string largeFaceListId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("largeFaceListId"u8)) + { + largeFaceListId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargeFaceList(name, userData, recognitionModel, largeFaceListId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargeFaceList)} does not support writing '{options.Format}' format."); + } + } + + LargeFaceList IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargeFaceList(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargeFaceList)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargeFaceList FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargeFaceList(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs new file mode 100644 index 000000000000..586366e51336 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Large face list is a list of faces, up to 1,000,000 faces. + public partial class LargeFaceList + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargeFaceList(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Keeps track of any properties unknown to the library. + internal LargeFaceList(string name, string userData, FaceRecognitionModel? recognitionModel, string largeFaceListId, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + LargeFaceListId = largeFaceListId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LargeFaceList() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + public string LargeFaceListId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListClientImpl.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListClientImpl.cs new file mode 100644 index 000000000000..98cf82a6473f --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListClientImpl.cs @@ -0,0 +1,1548 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated sub-client. + /// The LargeFaceListClientImpl sub-client. + public partial class LargeFaceListClientImpl + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _largeFaceListId; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. 
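This sub-client supports both credential modes declared in the fields above: key auth via the "Ocp-Apim-Subscription-Key" header when constructed with AzureKeyCredential, or bearer-token auth against the https://cognitiveservices.azure.com/.default scope when constructed with TokenCredential. Its constructor is internal, so callers would obtain an instance from the parent administration client; the accessor name in this sketch is hypothetical, since it is not shown in this diff.

// usings: System, Azure, Azure.AI.Vision.Face
var endpoint = new Uri("https://{resource-name}.cognitiveservices.azure.com");
var admin = new FaceAdministrationClient(endpoint, new AzureKeyCredential("<api-key>"));

// Hypothetical accessor: assumed to return a sub-client bound to one list id
// (matching the _largeFaceListId field above).
LargeFaceListClientImpl listClient = admin.GetLargeFaceListClientImpl("my-large-face-list");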
+ public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of LargeFaceListClientImpl for mocking. + protected LargeFaceListClientImpl() + { + } + + /// Initializes a new instance of LargeFaceListClientImpl. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// API Version. Allowed values: "v1.1-preview.1" | "v1.2-preview.1". + internal LargeFaceListClientImpl(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string largeFaceListId, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _largeFaceListId = largeFaceListId; + _apiVersion = apiVersion; + } + + /// Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/create-large-face-list for more details. + /// + public virtual async Task CreateAsync(string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreateRequest1 createRequest1 = new CreateRequest1(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(createRequest1.ToRequestContent(), context).ConfigureAwait(false); + return response; + } + + /// Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/create-large-face-list for more details. + /// + public virtual Response Create(string name, string userData = null, FaceRecognitionModel? 
recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreateRequest1 createRequest1 = new CreateRequest1(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(createRequest1.ToRequestContent(), context); + return response; + } + + /// + /// [Protocol Method] Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a specified Large Face List, including all of its persisted faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service.
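The two Create shapes above differ only in who builds the body: the convenience overloads assemble the JSON from typed parameters via the internal CreateRequest1, while the protocol overloads send caller-supplied RequestContent verbatim (the protocol-only Delete that follows then removes the whole list). A sketch of both Create paths, reusing the hypothetical listClient from the earlier sketch; FaceRecognitionModel.Recognition04 is an assumed member name matching the 'recognition_04' doc string.

// usings: System, System.Threading.Tasks, Azure, Azure.Core, Azure.AI.Vision.Face
static async Task CreateListBothWaysAsync(LargeFaceListClientImpl listClient)
{
    // Convenience overload: typed parameters, body built internally.
    await listClient.CreateAsync(
        name: "My list", userData: "demo", recognitionModel: FaceRecognitionModel.Recognition04);

    // Equivalent protocol call: the caller hand-builds the same JSON body.
    // (Either call alone creates the list; running both would conflict.)
    RequestContent body = RequestContent.Create(BinaryData.FromString(
        "{\"name\":\"My list\",\"userData\":\"demo\",\"recognitionModel\":\"recognition_04\"}"));
    await listClient.CreateAsync(body);
}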
+ /// + public virtual async Task DeleteAsync(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a specified Large Face List, including all of its persisted faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual async Task> GetLargeFaceListAsync(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargeFaceListAsync(returnRecognitionModel, context).ConfigureAwait(false); + return Response.FromValue(LargeFaceList.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual Response GetLargeFaceList(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargeFaceList(returnRecognitionModel, context); + return Response.FromValue(LargeFaceList.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargeFaceListAsync(bool? 
returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListRequest(returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargeFaceList(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListRequest(returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
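Update appears only in protocol form in this excerpt, so the caller supplies the JSON body themselves; GetLargeFaceList then reads the metadata back as the typed model. A sketch, again assuming the hypothetical listClient:

// usings: System, System.Threading.Tasks, Azure, Azure.Core, Azure.AI.Vision.Face
static async Task RenameListAsync(LargeFaceListClientImpl listClient)
{
    RequestContent body = RequestContent.Create(
        BinaryData.FromString("{\"name\":\"Renamed list\",\"userData\":\"updated\"}"));
    await listClient.UpdateAsync(body);

    // Read back; 'true' asks the service to include the recognition model.
    Response<LargeFaceList> list = await listClient.GetLargeFaceListAsync(returnRecognitionModel: true);
    Console.WriteLine($"{list.Value.LargeFaceListId}: {list.Value.Name}");
}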
+ /// + public virtual Response Update(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-lists for more details. + /// + public virtual async Task>> GetLargeFaceListsAsync(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargeFaceListsAsync(start, top, returnRecognitionModel, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceList.DeserializeLargeFaceList(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-lists for more details. + /// + public virtual Response> GetLargeFaceLists(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargeFaceLists(start, top, returnRecognitionModel, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceList.DeserializeLargeFaceList(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargeFaceListsAsync(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetLargeFaceLists"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListsRequest(start, top, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargeFaceLists(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetLargeFaceLists"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListsRequest(start, top, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// The cancellation token to use. + /// + public virtual async Task> GetTrainingStatusAsync(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetTrainingStatusAsync(context).ConfigureAwait(false); + return Response.FromValue(FaceTrainingResult.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// The cancellation token to use. + /// + public virtual Response GetTrainingStatus(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetTrainingStatus(context); + return Response.FromValue(FaceTrainingResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. 
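GetLargeFaceLists pages by id rather than by continuation token: per the parameter docs, 'start' lists resources whose id sorts after it, so passing the last largeFaceListId of a page fetches the next one. A sketch that drains all pages with the convenience overload:

// usings: System, System.Collections.Generic, System.Threading.Tasks, Azure, Azure.AI.Vision.Face
static async Task ListAllListsAsync(LargeFaceListClientImpl listClient)
{
    string start = null;
    while (true)
    {
        Response<IReadOnlyList<LargeFaceList>> page =
            await listClient.GetLargeFaceListsAsync(start: start, top: 100);
        foreach (LargeFaceList list in page.Value)
        {
            Console.WriteLine(list.LargeFaceListId);
        }
        if (page.Value.Count < 100)
        {
            break; // short page means nothing further to fetch
        }
        start = page.Value[page.Value.Count - 1].LargeFaceListId;
    }
}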
+ /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetTrainingStatusAsync(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetTrainingStatusRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetTrainingStatus(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetTrainingStatusRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face-from-url for more details. + internal virtual async Task> AddFaceFromUrlImplAsync(Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest1 addFaceFromUrlRequest1 = new AddFaceFromUrlRequest1(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceFromUrlImplAsync(addFaceFromUrlRequest1.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. 
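Training runs asynchronously on the service, so GetTrainingStatus is normally polled until it reaches a terminal state (the Train call itself is outside this excerpt). A sketch of a poll loop; the Status/Message property names and the FaceOperationStatus members are assumptions based on the FaceTrainingResult factory shown earlier in this diff.

// usings: System, System.Threading.Tasks, Azure, Azure.AI.Vision.Face
static async Task WaitForTrainingAsync(LargeFaceListClientImpl listClient)
{
    while (true)
    {
        Response<FaceTrainingResult> result = await listClient.GetTrainingStatusAsync();
        if (result.Value.Status == FaceOperationStatus.Succeeded)
        {
            return;
        }
        if (result.Value.Status == FaceOperationStatus.Failed)
        {
            throw new InvalidOperationException(result.Value.Message);
        }
        await Task.Delay(TimeSpan.FromSeconds(1)); // simple fixed backoff for the sketch
    }
}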
+ /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face-from-url for more details. + internal virtual Response AddFaceFromUrlImpl(Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest1 addFaceFromUrlRequest1 = new AddFaceFromUrlRequest1(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceFromUrlImpl(addFaceFromUrlRequest1.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceFromUrlImplAsync(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual Response AddFaceFromUrlImpl(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face for more details. + internal virtual async Task> AddFaceImplAsync(BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceImplAsync(content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face for more details. + internal virtual Response AddFaceImpl(BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceImpl(content, targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. 
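Both add overloads above are internal ('Impl'-suffixed), so the public surface presumably wraps them; from inside the library a URL-based add looks like the sketch below. The targetFace element type (int) and the FaceDetectionModel.Detection03 member name are assumptions consistent with the doc comments; the URL and rectangle are invented.

// usings: System, System.Threading.Tasks, Azure, Azure.AI.Vision.Face
static async Task AddFaceByUrlAsync(LargeFaceListClientImpl listClient)
{
    Response<AddFaceResult> added = await listClient.AddFaceFromUrlImplAsync(
        new Uri("https://example.com/photo.jpg"),
        targetFace: new[] { 10, 10, 100, 100 },   // left, top, width, height in pixels
        detectionModel: FaceDetectionModel.Detection03,
        userData: "badge-42");
    Console.WriteLine(added.Value.PersistedFaceId); // persisted: never expires, unlike a detected faceId
}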
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceImplAsync(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
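When the protocol overload is called directly, the response body is raw JSON; the persistedFaceId can be read out with JsonDocument, mirroring what AddFaceResult.FromResponse does internally. A sketch with an invented file name:

// usings: System, System.IO, System.Text.Json, System.Threading.Tasks, Azure, Azure.Core, Azure.AI.Vision.Face
static async Task AddFaceRawAsync(LargeFaceListClientImpl listClient)
{
    using FileStream image = File.OpenRead("face.jpg");
    Response raw = await listClient.AddFaceImplAsync(
        RequestContent.Create(image), detectionModel: "detection_03");

    using JsonDocument doc = JsonDocument.Parse(raw.Content);
    Guid persistedFaceId = doc.RootElement.GetProperty("persistedFaceId").GetGuid();
    Console.WriteLine(persistedFaceId);
}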
+ internal virtual Response AddFaceImpl(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/delete-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteFaceAsync(Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/delete-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeleteFace(Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// Face ID of the face. + /// The cancellation token to use. + /// + public virtual async Task> GetFaceAsync(Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetFaceAsync(persistedFaceId, context).ConfigureAwait(false); + return Response.FromValue(LargeFaceListFace.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// Face ID of the face. 
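DeleteFace exists only in protocol form, so it returns the raw Response and throws RequestFailedException on a non-success status by default; passing ErrorOptions.NoThrow turns that into a status check, which suits idempotent cleanup. A sketch:

// usings: System, System.Threading.Tasks, Azure, Azure.AI.Vision.Face
static async Task TryDeleteFaceAsync(LargeFaceListClientImpl listClient, Guid persistedFaceId)
{
    RequestContext context = new RequestContext { ErrorOptions = ErrorOptions.NoThrow };
    Response response = await listClient.DeleteFaceAsync(persistedFaceId, context);
    if (response.IsError)
    {
        Console.WriteLine($"Delete failed with status {response.Status}.");
    }
}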
+ /// The cancellation token to use. + /// + public virtual Response GetFace(Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetFace(persistedFaceId, context); + return Response.FromValue(LargeFaceListFace.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetFaceAsync(Guid persistedFaceId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceRequest(persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetFace(Guid persistedFaceId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceRequest(persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
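// ---------------------------------------------------------------------------
// Editorial usage sketch (not generated code): reading a persisted face with
// the GetFace convenience overload above, then patching its userData through
// the UpdateFace protocol overload. Response<T> generic arguments are elided
// throughout this diff; "list" and the face ID are hypothetical.
//
// static async Task ReadAndUpdateFaceSketchAsync(LargeFaceListClientImpl list, Guid persistedFaceId)
// {
//     Response<LargeFaceListFace> face = await list.GetFaceAsync(persistedFaceId);
//     Console.WriteLine($"{face.Value.PersistedFaceId}: {face.Value.UserData}");
//
//     await list.UpdateFaceAsync(persistedFaceId,
//         RequestContent.Create(BinaryData.FromObjectAsJson(new { userData = "updated note" })));
// }
// ---------------------------------------------------------------------------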
+ /// + public virtual async Task UpdateFaceAsync(Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.UpdateFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceRequest(persistedFaceId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response UpdateFace(Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.UpdateFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceRequest(persistedFaceId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List faces' persistedFaceId and userData in a specified Large Face List. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-faces for more details. + /// + public virtual async Task>> GetFacesAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetFacesAsync(start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceListFace.DeserializeLargeFaceListFace(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List faces' persistedFaceId and userData in a specified Large Face List. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-faces for more details. + /// + public virtual Response> GetFaces(string start = null, int? 
top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetFaces(start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceListFace.DeserializeLargeFaceListFace(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List faces' persistedFaceId and userData in a specified Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetFacesAsync(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetFaces"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFacesRequest(start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List faces' persistedFaceId and userData in a specified Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetFaces(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.GetFaces"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFacesRequest(start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Face List training task. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. 
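// ---------------------------------------------------------------------------
// Editorial usage sketch (not generated code): draining the GetFaces overloads
// above. The paging convention is inferred from the parameter docs ("start" is
// an exclusive lower bound, so the last ID of one page seeds the next page);
// "list" is a hypothetical LargeFaceListClientImpl.
//
// static void ListAllFacesSketch(LargeFaceListClientImpl list)
// {
//     string start = null;
//     while (true)
//     {
//         Response<IReadOnlyList<LargeFaceListFace>> page = list.GetFaces(start, top: 100);
//         foreach (LargeFaceListFace face in page.Value)
//             Console.WriteLine(face.PersistedFaceId);
//         if (page.Value.Count < 100)
//             break; // short page: nothing left to fetch
//         start = page.Value[page.Value.Count - 1].PersistedFaceId.ToString();
//     }
// }
// ---------------------------------------------------------------------------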
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task TrainAsync(WaitUntil waitUntil, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Train"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainRequest(context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "LargeFaceListClientImpl.Train", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Face List training task. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation Train(WaitUntil waitUntil, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClientImpl.Train"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainRequest(context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "LargeFaceListClientImpl.Train", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargeFaceListRequest(bool? 
returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargeFaceListsRequest(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetTrainingStatusRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/training", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateTrainRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/train", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAddFaceFromUrlImplRequest(RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + 
request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateAddFaceImplRequest(RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/octet-stream"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteFaceRequest(Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetFaceRequest(Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateFaceRequest(Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var 
uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetFacesRequest(string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier202; + private static ResponseClassifier ResponseClassifier202 => _responseClassifier202 ??= new StatusCodeClassifier(stackalloc ushort[] { 202 }); + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs new file mode 100644 index 000000000000..2f9621a39bbc --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargeFaceListFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargeFaceListFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargeFaceListFace(document.RootElement, options); + } + + internal static LargeFaceListFace DeserializeLargeFaceListFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargeFaceListFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support writing '{options.Format}' format."); + } + } + + LargeFaceListFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargeFaceListFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
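// ---------------------------------------------------------------------------
// Editorial usage sketch (not generated code): because LargeFaceListFace
// implements IJsonModel<T>/IPersistableModel<T> (generic arguments are elided
// in this diff), it round-trips through System.ClientModel's ModelReaderWriter
// using the JSON ("J") format implemented above.
//
// static LargeFaceListFace RoundTripSketch(LargeFaceListFace face)
// {
//     BinaryData json = ModelReaderWriter.Write(face);        // serialize
//     return ModelReaderWriter.Read<LargeFaceListFace>(json); // deserialize
// }
// ---------------------------------------------------------------------------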
+ internal static LargeFaceListFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargeFaceListFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs new file mode 100644 index 000000000000..6a0efa75f2a0 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for large face list. + public partial class LargeFaceListFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal LargeFaceListFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal LargeFaceListFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. The length limit is 1K. + public string UserData { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs new file mode 100644 index 000000000000..0a39ef9f4066 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargePersonGroup : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargePersonGroup IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargePersonGroup(document.RootElement, options); + } + + internal static LargePersonGroup DeserializeLargePersonGroup(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + string largePersonGroupId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargePersonGroup(name, userData, recognitionModel, largePersonGroupId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support writing '{options.Format}' format."); + } + } + + LargePersonGroup IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargePersonGroup(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargePersonGroup FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargePersonGroup(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs new file mode 100644 index 000000000000..a38758c190fd --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The container of the uploaded person data, including face recognition feature, and up to 1,000,000 people. + public partial class LargePersonGroup + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargePersonGroup(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// ID of the container. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroup(string name, string userData, FaceRecognitionModel? 
recognitionModel, string largePersonGroupId, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + LargePersonGroupId = largePersonGroupId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LargePersonGroup() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// ID of the container. + public string LargePersonGroupId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupClientImpl.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupClientImpl.cs new file mode 100644 index 000000000000..699f1bafea34 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupClientImpl.cs @@ -0,0 +1,1991 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated sub-client. + /// The LargePersonGroupClientImpl sub-client. + public partial class LargePersonGroupClientImpl + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _largePersonGroupId; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of LargePersonGroupClientImpl for mocking. + protected LargePersonGroupClientImpl() + { + } + + /// Initializes a new instance of LargePersonGroupClientImpl. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// ID of the container. + /// API Version. Allowed values: "v1.1-preview.1" | "v1.2-preview.1". 
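// ---------------------------------------------------------------------------
// Editorial note (not generated code): the constructor below is internal, so
// callers obtain this sub-client from FaceAdministrationClient. A minimal
// sketch, assuming a factory method named GetLargePersonGroupClientImpl
// (hypothetical; the factory itself is not part of this excerpt).
//
// static LargePersonGroupClientImpl GetGroupClientSketch()
// {
//     var admin = new FaceAdministrationClient(
//         new Uri("https://{resource-name}.cognitiveservices.azure.com"),
//         new AzureKeyCredential("<api-key>"));
//     return admin.GetLargePersonGroupClientImpl("my-group"); // hypothetical factory
// }
// ---------------------------------------------------------------------------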
+        internal LargePersonGroupClientImpl(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string largePersonGroupId, string apiVersion)
+        {
+            ClientDiagnostics = clientDiagnostics;
+            _pipeline = pipeline;
+            _keyCredential = keyCredential;
+            _tokenCredential = tokenCredential;
+            _endpoint = endpoint;
+            _largePersonGroupId = largePersonGroupId;
+            _apiVersion = apiVersion;
+        }
+
+        /// Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel.
+        /// User defined name, maximum length is 128.
+        /// Optional user defined data. Length should not exceed 16K.
+        /// The 'recognitionModel' associated with this large person group. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+        /// The cancellation token to use.
+        /// is null.
+        /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group for more details.
+        ///
+        public virtual async Task CreateAsync(string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default)
+        {
+            Argument.AssertNotNull(name, nameof(name));
+
+            CreateRequest createRequest = new CreateRequest(name, userData, recognitionModel, null);
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = await CreateAsync(createRequest.ToRequestContent(), context).ConfigureAwait(false);
+            return response;
+        }
+
+        /// Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel.
+        /// User defined name, maximum length is 128.
+        /// Optional user defined data. Length should not exceed 16K.
+        /// The 'recognitionModel' associated with this large person group. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+        /// The cancellation token to use.
+        /// is null.
+        /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group for more details.
+        ///
+        public virtual Response Create(string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default)
+        {
+            Argument.AssertNotNull(name, nameof(name));
+
+            CreateRequest createRequest = new CreateRequest(name, userData, recognitionModel, null);
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = Create(createRequest.ToRequestContent(), context);
+            return response;
+        }
+
+        ///
+        /// [Protocol Method] Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel.
+        ///
+        ///
+        ///
+        /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteAsync(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
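// ---------------------------------------------------------------------------
// Editorial usage sketch (not generated code): the "[Protocol Method]" Create
// overloads above accept raw JSON instead of typed models. The property names
// mirror the CreateRequest body built by the convenience overloads; the
// "group" wiring is hypothetical (see the note near the constructor).
//
// static Response CreateGroupSketch(LargePersonGroupClientImpl group)
// {
//     return group.Create(RequestContent.Create(
//         BinaryData.FromObjectAsJson(new { name = "My Group", userData = "demo" })));
// }
// ---------------------------------------------------------------------------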
+ /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual async Task> GetLargePersonGroupAsync(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupAsync(returnRecognitionModel, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroup.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual Response GetLargePersonGroup(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroup(returnRecognitionModel, context); + return Response.FromValue(LargePersonGroup.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupAsync(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupRequest(returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroup(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupRequest(returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Update(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-groups for more details. + /// + public virtual async Task>> GetLargePersonGroupsAsync(string start = null, int? top = null, bool? 
returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupsAsync(start, top, returnRecognitionModel, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroup.DeserializeLargePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-groups for more details. + /// + public virtual Response> GetLargePersonGroups(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroups(start, top, returnRecognitionModel, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroup.DeserializeLargePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupsAsync(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetLargePersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupsRequest(start, top, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// Please try the simpler convenience overload with strongly typed models first.
+ /// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
+ /// The number of items to list, ranging in [1, 1000]. Default is 1000.
+ /// Return 'recognitionModel' or not. The default value is false.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ public virtual Response GetLargePersonGroups(string start, int? top, bool? returnRecognitionModel, RequestContext context)
+ {
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetLargePersonGroups");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateGetLargePersonGroupsRequest(start, top, returnRecognitionModel, context);
+         return _pipeline.ProcessMessage(message, context);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ /// To check whether the Large Person Group training is completed or still ongoing. Large Person Group training is an asynchronous operation triggered by the "Train Large Person Group" API.
+ /// The cancellation token to use.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-training-status for more details.
+ public virtual async Task<Response<FaceTrainingResult>> GetTrainingStatusAsync(CancellationToken cancellationToken = default)
+ {
+     RequestContext context = FromCancellationToken(cancellationToken);
+     Response response = await GetTrainingStatusAsync(context).ConfigureAwait(false);
+     return Response.FromValue(FaceTrainingResult.FromResponse(response), response);
+ }
+
+ /// To check whether the Large Person Group training is completed or still ongoing. Large Person Group training is an asynchronous operation triggered by the "Train Large Person Group" API.
+ /// The cancellation token to use.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-training-status for more details.
+ public virtual Response<FaceTrainingResult> GetTrainingStatus(CancellationToken cancellationToken = default)
+ {
+     RequestContext context = FromCancellationToken(cancellationToken);
+     Response response = GetTrainingStatus(context);
+     return Response.FromValue(FaceTrainingResult.FromResponse(response), response);
+ }
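+
+ // Editor's note: an illustrative usage sketch, not part of the generated client. It pages
+ // through all Large Person Groups with the convenience overload above, using the last
+ // returned group ID as the exclusive "start" cursor; `client` is an assumed, already
+ // constructed instance of this client.
+ //
+ //   string start = null;
+ //   do
+ //   {
+ //       IReadOnlyList<LargePersonGroup> page =
+ //           await client.GetLargePersonGroupsAsync(start: start, top: 100);
+ //       foreach (LargePersonGroup group in page)
+ //       {
+ //           Console.WriteLine($"{group.LargePersonGroupId}: {group.Name}");
+ //       }
+ //       start = page.Count == 100 ? page[page.Count - 1].LargePersonGroupId : null;
+ //   } while (start != null);
+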
+ /// [Protocol Method] To check whether the Large Person Group training is completed or still ongoing. Large Person Group training is an asynchronous operation triggered by the "Train Large Person Group" API.
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// Please try the simpler convenience overload with strongly typed models first.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ public virtual async Task<Response> GetTrainingStatusAsync(RequestContext context)
+ {
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetTrainingStatus");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateGetTrainingStatusRequest(context);
+         return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ /// [Protocol Method] To check whether the Large Person Group training is completed or still ongoing. Large Person Group training is an asynchronous operation triggered by the "Train Large Person Group" API.
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// Please try the simpler convenience overload with strongly typed models first.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ public virtual Response GetTrainingStatus(RequestContext context)
+ {
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetTrainingStatus");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateGetTrainingStatusRequest(context);
+         return _pipeline.ProcessMessage(message, context);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ /// Create a new person in a specified Large Person Group. To add a face to this person, please call "Add Large Person Group Person Face".
+ /// User defined name, maximum length is 128.
+ /// Optional user defined data. Length should not exceed 16K.
+ /// The cancellation token to use.
+ /// name is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group-person for more details.
+ public virtual async Task<Response<CreatePersonResult>> CreatePersonAsync(string name, string userData = null, CancellationToken cancellationToken = default)
+ {
+     Argument.AssertNotNull(name, nameof(name));
+
+     CreatePersonRequest createPersonRequest = new CreatePersonRequest(name, userData, null);
+     RequestContext context = FromCancellationToken(cancellationToken);
+     Response response = await CreatePersonAsync(createPersonRequest.ToRequestContent(), context).ConfigureAwait(false);
+     return Response.FromValue(CreatePersonResult.FromResponse(response), response);
+ }
+
+ /// Create a new person in a specified Large Person Group. To add a face to this person, please call "Add Large Person Group Person Face".
+ /// User defined name, maximum length is 128.
+ /// Optional user defined data. Length should not exceed 16K.
+ /// The cancellation token to use.
+ /// name is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group-person for more details.
+ public virtual Response<CreatePersonResult> CreatePerson(string name, string userData = null, CancellationToken cancellationToken = default)
+ {
+     Argument.AssertNotNull(name, nameof(name));
+
+     CreatePersonRequest createPersonRequest = new CreatePersonRequest(name, userData, null);
+     RequestContext context = FromCancellationToken(cancellationToken);
+     Response response = CreatePerson(createPersonRequest.ToRequestContent(), context);
+     return Response.FromValue(CreatePersonResult.FromResponse(response), response);
+ }
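+
+ // Editor's note: a hedged sketch of the convenience overload above; `client` and the
+ // person's details are illustrative assumptions.
+ //
+ //   CreatePersonResult created = await client.CreatePersonAsync(
+ //       name: "Bill", userData: "employee-id:1234");
+ //   Guid personId = created.PersonId;
+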
+ /// [Protocol Method] Create a new person in a specified Large Person Group. To add a face to this person, please call "Add Large Person Group Person Face".
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// Please try the simpler convenience overload with strongly typed models first.
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// content is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ public virtual async Task<Response> CreatePersonAsync(RequestContent content, RequestContext context = null)
+ {
+     Argument.AssertNotNull(content, nameof(content));
+
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.CreatePerson");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateCreatePersonRequest(content, context);
+         return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ /// [Protocol Method] Create a new person in a specified Large Person Group. To add a face to this person, please call "Add Large Person Group Person Face".
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// Please try the simpler convenience overload with strongly typed models first.
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// content is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ public virtual Response CreatePerson(RequestContent content, RequestContext context = null)
+ {
+     Argument.AssertNotNull(content, nameof(content));
+
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.CreatePerson");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateCreatePersonRequest(content, context);
+         return _pipeline.ProcessMessage(message, context);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group-person for more details.
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// ID of the person.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ /// + public virtual async Task DeletePersonAsync(Guid personId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.DeletePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonRequest(personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeletePerson(Guid personId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.DeletePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonRequest(personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// ID of the person. + /// The cancellation token to use. + /// + public virtual async Task> GetPersonAsync(Guid personId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonAsync(personId, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroupPerson.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// ID of the person. + /// The cancellation token to use. + /// + public virtual Response GetPerson(Guid personId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPerson(personId, context); + return Response.FromValue(LargePersonGroupPerson.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
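+
+ // Editor's note: an illustrative sketch of retrieving a person with the convenience
+ // overload above and enumerating its registered faces; `client` and `personId` are
+ // assumed to exist.
+ //
+ //   LargePersonGroupPerson person = await client.GetPersonAsync(personId);
+ //   foreach (Guid faceId in person.PersistedFaceIds)
+ //   {
+ //       Console.WriteLine(faceId);
+ //   }
+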
+ /// + public virtual async Task GetPersonAsync(Guid personId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonRequest(personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPerson(Guid personId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonRequest(personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdatePersonAsync(Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.UpdatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonRequest(personId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
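+
+ // Editor's note: Update and UpdatePerson are exposed only as protocol methods, so the
+ // JSON body must be passed as RequestContent. A minimal sketch, assuming `client` and
+ // `personId` exist (BinaryData.FromObjectAsJson serializes the anonymous payload):
+ //
+ //   Response response = await client.UpdatePersonAsync(
+ //       personId,
+ //       RequestContent.Create(BinaryData.FromObjectAsJson(
+ //           new { name = "Bill", userData = "relocated to Redmond" })));
+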
+ /// + public virtual Response UpdatePerson(Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.UpdatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonRequest(personId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-persons for more details. + /// + public virtual async Task>> GetPersonsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonsAsync(start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroupPerson.DeserializeLargePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-persons for more details. + /// + public virtual Response> GetPersons(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersons(start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroupPerson.DeserializeLargePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonsAsync(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonsRequest(start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPersons(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonsRequest(start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face-from-url for more details. + internal virtual async Task> AddFaceFromUrlImplAsync(Guid personId, Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest addFaceFromUrlRequest = new AddFaceFromUrlRequest(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceFromUrlImplAsync(personId, addFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// URL of input image. 
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face-from-url for more details. + internal virtual Response AddFaceFromUrlImpl(Guid personId, Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest addFaceFromUrlRequest = new AddFaceFromUrlRequest(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceFromUrlImpl(personId, addFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceFromUrlImplAsync(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. 
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual Response AddFaceFromUrlImpl(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face for more details. + internal virtual async Task> AddFaceImplAsync(Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceImplAsync(personId, content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face for more details. 
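+
+ // Editor's note: the *Impl overloads here are internal and are surfaced through the
+ // public client wrappers. A hedged sketch of the call shape at this layer; the image
+ // URL, the targetFace rectangle (left, top, width, height) and the detection model
+ // member name are illustrative assumptions.
+ //
+ //   AddFaceResult added = await client.AddFaceFromUrlImplAsync(
+ //       personId,
+ //       new Uri("https://example.com/photo.jpg"),
+ //       targetFace: new[] { 10, 10, 100, 100 },
+ //       detectionModel: FaceDetectionModel.Detection03);
+ //   Guid persistedFaceId = added.PersistedFaceId;
+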
+ internal virtual Response AddFaceImpl(Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceImpl(personId, content, targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceImplAsync(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
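+
+ // Editor's note: the binary overloads above send the raw image bytes as
+ // application/octet-stream (see CreateAddFaceImplRequest later in this file), while the
+ // URL variant posts JSON. The optional targetFace values are joined into one
+ // comma-delimited query parameter, e.g. "targetFace=10,10,100,100". A hedged sketch
+ // with an assumed local file:
+ //
+ //   BinaryData image = BinaryData.FromBytes(File.ReadAllBytes("photo.jpg"));
+ //   AddFaceResult added = await client.AddFaceImplAsync(personId, image);
+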
+ internal virtual Response AddFaceImpl(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteFaceAsync(Guid personId, Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(personId, persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeleteFace(Guid personId, Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(personId, persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details. + /// ID of the person. + /// Face ID of the face. + /// The cancellation token to use. 
+ /// + public virtual async Task> GetFaceAsync(Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetFaceAsync(personId, persistedFaceId, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroupPersonFace.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details. + /// ID of the person. + /// Face ID of the face. + /// The cancellation token to use. + /// + public virtual Response GetFace(Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetFace(personId, persistedFaceId, context); + return Response.FromValue(LargePersonGroupPersonFace.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetFaceAsync(Guid personId, Guid persistedFaceId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceRequest(personId, persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetFace(Guid personId, Guid persistedFaceId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.GetFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceRequest(personId, persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person-face for more details. 
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// ID of the person.
+ /// Face ID of the face.
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// content is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ public virtual async Task<Response> UpdateFaceAsync(Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null)
+ {
+     Argument.AssertNotNull(content, nameof(content));
+
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.UpdateFace");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateUpdateFaceRequest(personId, persistedFaceId, content, context);
+         return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person-face for more details.
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// ID of the person.
+ /// Face ID of the face.
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// content is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ public virtual Response UpdateFace(Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null)
+ {
+     Argument.AssertNotNull(content, nameof(content));
+
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.UpdateFace");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateUpdateFaceRequest(personId, persistedFaceId, content, context);
+         return _pipeline.ProcessMessage(message, context);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
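+
+ // Editor's note: training is a long-running operation. A minimal sketch of the
+ // protocol method declared below, assuming `client` exists: WaitUntil.Completed
+ // blocks until training finishes, WaitUntil.Started returns immediately so the
+ // status can be polled with GetTrainingStatusAsync.
+ //
+ //   Operation operation = await client.TrainAsync(WaitUntil.Completed);
+ //   FaceTrainingResult status = await client.GetTrainingStatusAsync();
+ //   // status.Message is populated only when training failed.
+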
+ // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
+ /// [Protocol Method] Submit a Large Person Group training task. Training is a crucial step: only a trained Large Person Group can be used by "Identify From Large Person Group".
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// WaitUntil.Completed if the method should wait to return until the long-running operation has completed on the service; WaitUntil.Started if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// Service returned a non-success status code.
+ /// The Operation representing an asynchronous operation on the service.
+ public virtual async Task<Operation> TrainAsync(WaitUntil waitUntil, RequestContext context = null)
+ {
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Train");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateTrainRequest(context);
+         return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "LargePersonGroupClientImpl.Train", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
+ /// [Protocol Method] Submit a Large Person Group training task. Training is a crucial step: only a trained Large Person Group can be used by "Identify From Large Person Group".
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ /// WaitUntil.Completed if the method should wait to return until the long-running operation has completed on the service; WaitUntil.Started if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// Service returned a non-success status code.
+ /// The Operation representing an asynchronous operation on the service.
+ public virtual Operation Train(WaitUntil waitUntil, RequestContext context = null)
+ {
+     using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClientImpl.Train");
+     scope.Start();
+     try
+     {
+         using HttpMessage message = CreateTrainRequest(context);
+         return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "LargePersonGroupClientImpl.Train", OperationFinalStateVia.OperationLocation, context, waitUntil);
+     }
+     catch (Exception e)
+     {
+         scope.Failed(e);
+         throw;
+     }
+ }
+
+ internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context)
+ {
+     var message = _pipeline.CreateMessage(context, ResponseClassifier200);
+     var request = message.Request;
+     request.Method = RequestMethod.Put;
+     var uri = new RawRequestUriBuilder();
+     uri.Reset(_endpoint);
+     uri.AppendRaw("/face/", false);
+     uri.AppendRaw(_apiVersion, true);
+     uri.AppendPath("/largepersongroups/", false);
+     uri.AppendPath(_largePersonGroupId, true);
+     request.Uri = uri;
+     request.Headers.Add("Accept", "application/json");
+     request.Headers.Add("Content-Type", "application/json");
+     request.Content = content;
+     return message;
+ }
+
+ internal HttpMessage CreateDeleteRequest(RequestContext context)
+ {
+     var message = _pipeline.CreateMessage(context, ResponseClassifier200);
+     var request = message.Request;
+     request.Method = RequestMethod.Delete;
+     var uri = new RawRequestUriBuilder();
+     uri.Reset(_endpoint);
+     uri.AppendRaw("/face/", false);
+     uri.AppendRaw(_apiVersion, true);
+     uri.AppendPath("/largepersongroups/", false);
+     uri.AppendPath(_largePersonGroupId, true);
+     request.Uri = uri;
+     request.Headers.Add("Accept", "application/json");
+     return message;
+ }
+
+ internal HttpMessage CreateGetLargePersonGroupRequest(bool?
returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargePersonGroupsRequest(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetTrainingStatusRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/training", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateTrainRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/train", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreatePersonRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = 
new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeletePersonRequest(Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetPersonRequest(Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdatePersonRequest(Guid personId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetPersonsRequest(string start, int? 
top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAddFaceFromUrlImplRequest(Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateAddFaceImplRequest(Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/octet-stream"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteFaceRequest(Guid personId, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + 
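+ // Editor's note: every request builder in this client composes the URI the same way:
+ // the raw "/face/{_apiVersion}" prefix, then escaped path segments for the group,
+ // person and face IDs, yielding, for this delete-face request,
+ // /face/{_apiVersion}/largepersongroups/{_largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}.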
uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetFaceRequest(Guid personId, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateFaceRequest(Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier202; + private static ResponseClassifier ResponseClassifier202 => _responseClassifier202 ??= new StatusCodeClassifier(stackalloc ushort[] { 202 }); + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs new file mode 100644 index 000000000000..eb363bd7f27c --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargePersonGroupPerson : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + } + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsCollectionDefined(PersistedFaceIds)) + { + writer.WritePropertyName("persistedFaceIds"u8); + writer.WriteStartArray(); + foreach (var item in PersistedFaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargePersonGroupPerson IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargePersonGroupPerson(document.RootElement, options); + } + + internal static LargePersonGroupPerson DeserializeLargePersonGroupPerson(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + string name = default; + string userData = default; + IReadOnlyList persistedFaceIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("persistedFaceIds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + persistedFaceIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargePersonGroupPerson(personId, name, userData, persistedFaceIds ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support writing '{options.Format}' format."); + } + } + + LargePersonGroupPerson IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargePersonGroupPerson(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargePersonGroupPerson FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargePersonGroupPerson(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs new file mode 100644 index 000000000000..1884b1812ec4 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The person in a specified large person group. To add face to this person, please call "Add Large Person Group Person Face". + public partial class LargePersonGroupPerson + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargePersonGroupPerson(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + PersistedFaceIds = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// ID of the person. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Face ids of registered faces in the person. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroupPerson(Guid personId, string name, string userData, IReadOnlyList persistedFaceIds, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + Name = name; + UserData = userData; + PersistedFaceIds = persistedFaceIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LargePersonGroupPerson() + { + } + + /// ID of the person. + public Guid PersonId { get; } + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Face ids of registered faces in the person. + public IReadOnlyList PersistedFaceIds { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs new file mode 100644 index 000000000000..a602eb3457e0 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
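+// Same pattern as the other serializers in this PR: unknown JSON properties seen
+// during deserialization are captured into _serializedAdditionalRawData and written
+// back out for non-wire formats, so payloads from newer service versions survive a
+// read/write round trip without data loss.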
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargePersonGroupPersonFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargePersonGroupPersonFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargePersonGroupPersonFace(document.RootElement, options); + } + + internal static LargePersonGroupPersonFace DeserializeLargePersonGroupPersonFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargePersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support writing '{options.Format}' format."); + } + } + + LargePersonGroupPersonFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargePersonGroupPersonFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargePersonGroupPersonFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargePersonGroupPersonFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs new file mode 100644 index 000000000000..56aa4d0061ff --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for large person group person. + public partial class LargePersonGroupPersonFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal LargePersonGroupPersonFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroupPersonFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. 
The length limit is 1K. + public string UserData { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs index 2a3c6d8b1280..f5a0af9fc967 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs @@ -22,19 +22,13 @@ public LivenessModel(string value) _value = value ?? throw new ArgumentNullException(nameof(value)); } - private const string V20200215Preview01Value = "2020-02-15-preview.01"; - private const string V20211112Preview03Value = "2021-11-12-preview.03"; private const string V20221015Preview04Value = "2022-10-15-preview.04"; - private const string V20230302Preview05Value = "2023-03-02-preview.05"; + private const string V20231220Preview06Value = "2023-12-20-preview.06"; - /// 2020-02-15-preview.01. - public static LivenessModel V20200215Preview01 { get; } = new LivenessModel(V20200215Preview01Value); - /// 2021-11-12-preview.03. - public static LivenessModel V20211112Preview03 { get; } = new LivenessModel(V20211112Preview03Value); /// 2022-10-15-preview.04. public static LivenessModel V20221015Preview04 { get; } = new LivenessModel(V20221015Preview04Value); - /// 2023-03-02-preview.05. - public static LivenessModel V20230302Preview05 { get; } = new LivenessModel(V20230302Preview05Value); + /// 2023-12-20-preview.06. + public static LivenessModel V20231220Preview06 { get; } = new LivenessModel(V20231220Preview06Value); /// Determines if two values are the same. public static bool operator ==(LivenessModel left, LivenessModel right) => left.Equals(right); /// Determines if two values are not the same. diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs index b29e45e21167..a1b1ed8b4884 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs @@ -10,7 +10,7 @@ namespace Azure.AI.Vision.Face { - /// The liveness operation mode to drive the client’s end-user experience. + /// The liveness operation mode to drive the client's end-user experience. public readonly partial struct LivenessOperationMode : IEquatable { private readonly string _value; @@ -25,9 +25,9 @@ public LivenessOperationMode(string value) private const string PassiveValue = "Passive"; private const string PassiveActiveValue = "PassiveActive"; - /// Utilizes a passive liveness technique that requires no additional actions from the user. Requires normal indoor lighting and high screen brightness for optimal performance. And thus, this mode has a narrow operational envelope and will not be suitable for scenarios that requires the end-user’s to be in bright lighting conditions. Note: this is the only supported mode for the Mobile (iOS and Android) solution. + /// Utilizes a passive liveness technique that requires no additional actions from the user. It requires normal indoor lighting and high screen brightness for optimal performance; as a result, this mode has a narrow operational envelope and is not suitable for scenarios that require the end user to be in bright lighting conditions. Note: this is the only supported mode for the Mobile (iOS and Android) solution.
public static LivenessOperationMode Passive { get; } = new LivenessOperationMode(PassiveValue); - /// This mode utilizes a hybrid passive or active liveness technique that necessitates user cooperation. It is optimized to require active motion only under suboptimal lighting conditions. Unlike the passive mode, this mode has no lighting restrictions, and thus offering a broader operational envelope. This mode is preferable on Web based solutions due to the lack of automatic screen brightness control available on browsers which hinders the Passive mode’s operational envelope on Web based solutions. + /// This mode utilizes a hybrid passive or active liveness technique that necessitates user cooperation. It is optimized to require active motion only under suboptimal lighting conditions. Unlike the passive mode, it has no lighting restrictions and therefore offers a broader operational envelope. This mode is preferable for web-based solutions, because browsers lack the automatic screen brightness control that the Passive mode depends on. public static LivenessOperationMode PassiveActive { get; } = new LivenessOperationMode(PassiveActiveValue); /// Determines if two values are the same. public static bool operator ==(LivenessOperationMode left, LivenessOperationMode right) => left.Equals(right); diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs index 8db941366715..203873526c05 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs @@ -42,6 +42,16 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelRea writer.WriteObjectValue(Response, options); writer.WritePropertyName("digest"u8); writer.WriteStringValue(Digest); + if (Optional.IsDefined(SessionImageId)) + { + writer.WritePropertyName("sessionImageId"u8); + writer.WriteStringValue(SessionImageId); + } + if (Optional.IsDefined(VerifyImageHash)) + { + writer.WritePropertyName("verifyImageHash"u8); + writer.WriteStringValue(VerifyImageHash); + } if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -88,6 +98,8 @@ internal static LivenessSessionAuditEntry DeserializeLivenessSessionAuditEntry(J AuditRequestInfo request = default; AuditLivenessResponseInfo response = default; string digest = default; + string sessionImageId = default; + string verifyImageHash = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -132,6 +144,16 @@ internal static LivenessSessionAuditEntry DeserializeLivenessSessionAuditEntry(J digest = property.Value.GetString(); continue; } + if (property.NameEquals("sessionImageId"u8)) + { + sessionImageId = property.Value.GetString(); + continue; + } + if (property.NameEquals("verifyImageHash"u8)) + { + verifyImageHash = property.Value.GetString(); + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); @@ -147,6 +169,8 @@ internal static LivenessSessionAuditEntry DeserializeLivenessSessionAuditEntry(J request, response, digest, + sessionImageId, + verifyImageHash, serializedAdditionalRawData); } diff --git
a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs index 999391540240..46b364a13ffd 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs @@ -83,8 +83,10 @@ internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, /// The request of this entry. /// The response of this entry. /// The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. + /// The image ID of the session request. + /// The sha256 hash of the verify-image in the request. /// Keeps track of any properties unknown to the library. - internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, string clientRequestId, DateTimeOffset receivedDateTime, AuditRequestInfo request, AuditLivenessResponseInfo response, string digest, IDictionary serializedAdditionalRawData) + internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, string clientRequestId, DateTimeOffset receivedDateTime, AuditRequestInfo request, AuditLivenessResponseInfo response, string digest, string sessionImageId, string verifyImageHash, IDictionary serializedAdditionalRawData) { Id = id; SessionId = sessionId; @@ -94,6 +96,8 @@ internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, Request = request; Response = response; Digest = digest; + SessionImageId = sessionImageId; + VerifyImageHash = verifyImageHash; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -118,5 +122,9 @@ internal LivenessSessionAuditEntry() public AuditLivenessResponseInfo Response { get; } /// The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. public string Digest { get; } + /// The image ID of the session request. + public string SessionImageId { get; } + /// The sha256 hash of the verify-image in the request. + public string VerifyImageHash { get; } } } diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs new file mode 100644 index 000000000000..fa9cc27b3b68 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
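+// VerifyFromLargePersonGroupRequest is the internal body of the "Verify From Large
+// Person Group" operation; on the wire it is a flat JSON object of the form
+//
+//     { "faceId": "<guid>", "largePersonGroupId": "<id>", "personId": "<guid>" }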
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class VerifyFromLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + VerifyFromLargePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement, options); + } + + internal static VerifyFromLargePersonGroupRequest DeserializeVerifyFromLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + string largePersonGroupId = default; + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + VerifyFromLargePersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VerifyFromLargePersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs new file mode 100644 index 000000000000..e78b93c002c1 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The VerifyFromLargePersonGroupRequest. + internal partial class VerifyFromLargePersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The faceId of the face, which comes from "Detect". + /// An existing largePersonGroupId, used together with personId for fast loading of a specified person; the largePersonGroupId is created in "Create Large Person Group". + /// Specifies a certain person in the Large Person Group. + /// is null.
+ internal VerifyFromLargePersonGroupRequest(Guid faceId, string largePersonGroupId, Guid personId) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + FaceId = faceId; + LargePersonGroupId = largePersonGroupId; + PersonId = personId; + } + + /// Initializes a new instance of . + /// The faceId of the face, which comes from "Detect". + /// An existing largePersonGroupId, used together with personId for fast loading of a specified person; the largePersonGroupId is created in "Create Large Person Group". + /// Specifies a certain person in the Large Person Group. + /// Keeps track of any properties unknown to the library. + internal VerifyFromLargePersonGroupRequest(Guid faceId, string largePersonGroupId, Guid personId, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + LargePersonGroupId = largePersonGroupId; + PersonId = personId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VerifyFromLargePersonGroupRequest() + { + } + + /// The faceId of the face, which comes from "Detect". + public Guid FaceId { get; } + /// An existing largePersonGroupId, used together with personId for fast loading of a specified person; the largePersonGroupId is created in "Create Large Person Group". + public string LargePersonGroupId { get; } + /// Specifies a certain person in the Large Person Group. + public Guid PersonId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs index 0e9369d88787..ff7d77ae47af 100644 --- a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs @@ -230,5 +230,207 @@ public async Task Example_FaceClient_Group_GroupFaceIDs_Convenience_Async() Response response = await client.GroupAsync(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), Guid.Parse("015839fb-fbd9-4f79-ace9-7675fc2f1dd9"), Guid.Parse("65d083d4-9447-47d1-af30-b626144bf0fb"), Guid.Parse("fce92aed-d578-4d2e-8114-068f8af4492e"), Guid.Parse("30ea1073-cc9e-4652-b1e3-d08fb7b95315"), Guid.Parse("be386ab3-af91-4104-9e6d-4dae4c9fddb7"), Guid.Parse("fbd2a038-dbff-452c-8e79-2ee81b1aa84e"), Guid.Parse("b64d5e15-8257-4af2-b20a-5a750f8940e7") }); } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", + }); + Response response = client.FindSimilarFromLargeFaceList(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId =
"c5c24a82-6845-4031-9d5d-978df9175426", + maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", + }); + Response response = await client.FindSimilarFromLargeFaceListAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"c5c24a82-6845-4031-9d5d-978df9175426" + }, + largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7, + }); + Response response = client.IdentifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"c5c24a82-6845-4031-9d5d-978df9175426" + }, + largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7, + }); + Response response = await client.IdentifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + 
AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", + }); + Response response = client.VerifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", + }); + Response response = await client.VerifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFromLargePersonGroup(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); + } } } diff --git 
a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs index 533e4ac10132..b793cf92c5e9 100644 --- a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs @@ -514,5 +514,119 @@ public async Task Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEnt Response> response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("b12e033e-bda7-4b83-a211-e721c661f30e"); } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", + }); + Response response = client.DetectFromSessionImage(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", + }); + Response response = await client.DetectFromSessionImageAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.DetectFromSessionImage("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.DetectFromSessionImageAsync("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public 
void Example_FaceSessionClient_GetSessionImage_GetSessionImage() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetSessionImage_GetSessionImage_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetSessionImage_GetSessionImage_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetSessionImage_GetSessionImage_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa"); + } } } diff --git a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargeFaceListClientImpl.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargeFaceListClientImpl.cs new file mode 100644 index 000000000000..82da1dc115f1 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargeFaceListClientImpl.cs @@ -0,0 +1,484 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
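+// These samples are compile-only (every test is marked [Ignore]); they demonstrate
+// both the protocol surface (RequestContent in, raw Response out) and the
+// convenience overloads that bind responses to the generated models.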
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.Vision.Face.Samples +{ + public partial class Samples_LargeFaceListClientImpl + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Create_CreateLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", + }); + Response response = client.Create(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Create_CreateLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", + }); + Response response = await client.CreateAsync(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Create_CreateLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.Create("your_large_face_list_name"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Create_CreateLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.CreateAsync("your_large_face_list_name"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Delete_DeleteLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.Delete(); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Delete_DeleteLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.DeleteAsync(); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + 
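// In the protocol overload below, the boolean argument is the service's
+ // returnRecognitionModel flag (parameter name assumed from the REST API) and the
+ // trailing null is the optional RequestContext. +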
public void Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.GetLargeFaceList(true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.GetLargeFaceListAsync(true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.GetLargeFaceList(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.GetLargeFaceListAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Update_UpdateLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + }); + Response response = client.Update(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Update_UpdateLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + }); + Response response = await client.UpdateAsync(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists() 
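+ // Paging sketch: "my_list_id" is the start cursor (listing resumes after this ID)
+ // and 20 is the top page size, mirroring the start/top query parameters built by
+ // the request methods earlier in this diff.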
+ { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + + Response response = client.GetLargeFaceLists("my_list_id", 20, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + + Response response = await client.GetLargeFaceListsAsync("my_list_id", 20, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + + Response> response = client.GetLargeFaceLists(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null); + + Response> response = await client.GetLargeFaceListsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.GetTrainingStatus(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.GetTrainingStatusAsync(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + 
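// Training must have succeeded before the list can serve "Find Similar" requests;
+ // the timestamps below record creation, the last action, and the last successful training. +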
Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.GetTrainingStatus(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.GetTrainingStatusAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_DeleteFace_DeleteFaceFromLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.DeleteFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_DeleteFace_DeleteFaceFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.DeleteFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_UpdateFace_UpdateFaceInLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + userData = "your_user_data", + }); + Response response = client.UpdateFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_UpdateFace_UpdateFaceInLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + userData = "your_user_data", + }); + Response response = await client.UpdateFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = client.GetFaces("00000000-0000-0000-0000-000000000000", 20, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id"); + + Response response = await client.GetFacesAsync("00000000-0000-0000-0000-000000000000", 20, null); + + JsonElement result = 
+            Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList_Convenience()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+            Response<IReadOnlyList<LargeFaceListFace>> response = client.GetFaces();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList_Convenience_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+            Response<IReadOnlyList<LargeFaceListFace>> response = await client.GetFacesAsync();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargeFaceList_Train_TrainLargeFaceList()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+            Operation operation = client.Train(WaitUntil.Completed);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargeFaceList_Train_TrainLargeFaceList_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+            Operation operation = await client.TrainAsync(WaitUntil.Completed);
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargePersonGroupClientImpl.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargePersonGroupClientImpl.cs
new file mode 100644
index 000000000000..600650ca64ed
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargePersonGroupClientImpl.cs
@@ -0,0 +1,660 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Identity;
+using NUnit.Framework;
+
+namespace Azure.AI.Vision.Face.Samples
+{
+    public partial class Samples_LargePersonGroupClientImpl
+    {
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_Create_CreateLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_name",
+                userData = "your_user_data",
+                recognitionModel = "recognition_01",
+            });
+            Response response = client.Create(content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_Create_CreateLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_name",
+                userData = "your_user_data",
+                recognitionModel = "recognition_01",
+            });
+            Response response = await client.CreateAsync(content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_Create_CreateLargePersonGroup_Convenience()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = client.Create("your_large_person_group_name");
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_Create_CreateLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = await client.CreateAsync("your_large_person_group_name");
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_Delete_DeleteLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = client.Delete();
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_Delete_DeleteLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = await client.DeleteAsync();
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = client.GetLargePersonGroup(true, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("name").ToString());
+            Console.WriteLine(result.GetProperty("largePersonGroupId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = await client.GetLargePersonGroupAsync(true, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("name").ToString());
+            Console.WriteLine(result.GetProperty("largePersonGroupId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup_Convenience()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response<LargePersonGroup> response = client.GetLargePersonGroup();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response<LargePersonGroup> response = await client.GetLargePersonGroupAsync();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_Update_UpdateLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_name",
+                userData = "your_user_data",
+            });
+            Response response = client.Update(content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_Update_UpdateLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_name",
+                userData = "your_user_data",
+            });
+            Response response = await client.UpdateAsync(content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+            Response response = client.GetLargePersonGroups("00000000-0000-0000-0000-000000000000", 20, true, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result[0].GetProperty("name").ToString());
+            Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+            Response response = await client.GetLargePersonGroupsAsync("00000000-0000-0000-0000-000000000000", 20, true, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result[0].GetProperty("name").ToString());
+            Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups_Convenience()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+            Response<IReadOnlyList<LargePersonGroup>> response = client.GetLargePersonGroups();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups_Convenience_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+            Response<IReadOnlyList<LargePersonGroup>> response = await client.GetLargePersonGroupsAsync();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = client.GetTrainingStatus(null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("status").ToString());
+            Console.WriteLine(result.GetProperty("createdDateTime").ToString());
+            Console.WriteLine(result.GetProperty("lastActionDateTime").ToString());
+            Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response response = await client.GetTrainingStatusAsync(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response response = client.GetTrainingStatus(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response response = await client.GetTrainingStatusAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_person_group_person_name", + userData = "your_user_data", + }); + Response response = client.CreatePerson(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_person_group_person_name", + userData = "your_user_data", + }); + Response response = await client.CreatePersonAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, 
+
+            Response<CreatePersonResult> response = client.CreatePerson("your_large_person_group_person_name");
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response<CreatePersonResult> response = await client.CreatePersonAsync("your_large_person_group_person_name");
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_DeletePerson_DeletePersonFromLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = client.DeletePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_DeletePerson_DeletePersonFromLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = await client.DeletePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("personId").ToString());
+            Console.WriteLine(result.GetProperty("name").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("personId").ToString());
+            Console.WriteLine(result.GetProperty("name").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup_Convenience()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPerson> response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPerson> response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_UpdatePerson_UpdatePersonInLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_person_name",
+                userData = "your_user_data",
+            });
+            Response response = client.UpdatePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_UpdatePerson_UpdatePersonInLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_person_name",
+                userData = "your_user_data",
+            });
+            Response response = await client.UpdatePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = client.GetPersons("00000000-0000-0000-0000-000000000000", 20, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result[0].GetProperty("personId").ToString());
+            Console.WriteLine(result[0].GetProperty("name").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response response = await client.GetPersonsAsync("00000000-0000-0000-0000-000000000000", 20, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response> response = client.GetPersons(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response> response = await client.GetPersonsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_DeleteFace_DeleteFaceFromLargePersonGroupPerson() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response response = client.DeleteFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_DeleteFace_DeleteFaceFromLargePersonGroupPerson_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response response = await client.DeleteFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id"); + + Response response = await 
+            Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("persistedFaceId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson_Convenience()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPersonFace> response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson_Convenience_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPersonFace> response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPersonFace_UpdateFace_UpdateFaceInLargePersonGroupPerson()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                userData = "your_user_data",
+            });
+            Response response = client.UpdateFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPersonFace_UpdateFace_UpdateFaceInLargePersonGroupPerson_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                userData = "your_user_data",
+            });
+            Response response = await client.UpdateFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_Train_TrainLargePersonGroup()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Operation operation = client.Train(WaitUntil.Completed);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_Train_TrainLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("<https://my-service.azure.com>");
+            AzureKeyCredential credential = new AzureKeyCredential("<key>");
+            LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+            Operation operation = await client.TrainAsync(WaitUntil.Completed);
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml b/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml
index d170fcf1cfd5..e77c3067ac0c 100644
--- a/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml
+++ b/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml
@@ -1,5 +1,4 @@
 directory: specification/ai/Face
-commit: b9652b3e860e690c9ff53866071c591d59fed907
+commit: aea97f6dcb7b5c3039a82b7e54c49e35fe1106ac
 repo: Azure/azure-rest-api-specs
-additionalDirectories: []
-
+additionalDirectories:
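
The generated samples above exercise each LargePersonGroup operation in isolation. Below is a minimal end-to-end sketch of how the same surface composes into one administration workflow. It is illustrative only, not generated output: it assumes the client names and convenience signatures shown in the samples (FaceAdministrationClient, GetLargePersonGroupClientImplClient, Create, CreatePerson, Train, GetTrainingStatus), and the endpoint, key, and resource names are placeholders.

using System;
using Azure;
using Azure.AI.Vision.Face;

public static class LargePersonGroupWorkflow
{
    public static void Run()
    {
        // Placeholders: substitute a real Cognitive Services endpoint and key.
        Uri endpoint = new Uri("<https://my-service.azure.com>");
        AzureKeyCredential credential = new AzureKeyCredential("<key>");

        // The administration client hands out per-resource subclients, as in the samples above.
        LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential)
            .GetLargePersonGroupClientImplClient("my_large_person_group_id");

        // Create the group and register a person in it (names here are illustrative).
        client.Create("my_large_person_group_name");
        Response<CreatePersonResult> person = client.CreatePerson("my_person_name");
        Console.WriteLine($"Created person {person.Value.PersonId}");

        // WaitUntil.Completed blocks until the service-side training operation finishes.
        client.Train(WaitUntil.Completed);

        // Confirm training succeeded before the group is used for identification.
        Response<FaceTrainingResult> status = client.GetTrainingStatus();
        Console.WriteLine($"Training status: {status.Value.Status}");
    }
}

The protocol overloads shown in the samples (RequestContent in, raw Response out) cover the same operations when callers need full request and response control; the convenience overloads used in this sketch return typed models such as CreatePersonResult and FaceTrainingResult.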