diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs
index fdc44b3da4e3..d1ff0bc7e195 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs
@@ -12,9 +12,34 @@
namespace Microsoft.Extensions.Azure
{
- /// <summary> Extension methods to add <see cref="FaceClient"/>, <see cref="FaceSessionClient"/> to client builder. </summary>
+ /// <summary> Extension methods to add <see cref="FaceAdministrationClient"/>, <see cref="FaceClient"/>, <see cref="FaceSessionClient"/> to client builder. </summary>
public static partial class AIVisionFaceClientBuilderExtensions
{
+ /// <summary> Registers a <see cref="FaceAdministrationClient"/> instance. </summary>
+ /// <param name="builder"> The builder to register with. </param>
+ /// <param name="endpoint">
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ /// </param>
+ /// <param name="credential"> A credential used to authenticate to an Azure Service. </param>
+ public static IAzureClientBuilder<FaceAdministrationClient, AzureAIVisionFaceClientOptions> AddFaceAdministrationClient<TBuilder>(this TBuilder builder, Uri endpoint, AzureKeyCredential credential)
+ where TBuilder : IAzureClientFactoryBuilder
+ {
+ return builder.RegisterClientFactory<FaceAdministrationClient, AzureAIVisionFaceClientOptions>((options) => new FaceAdministrationClient(endpoint, credential, options));
+ }
+
+ /// <summary> Registers a <see cref="FaceAdministrationClient"/> instance. </summary>
+ /// <param name="builder"> The builder to register with. </param>
+ /// <param name="endpoint">
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ /// </param>
+ public static IAzureClientBuilder<FaceAdministrationClient, AzureAIVisionFaceClientOptions> AddFaceAdministrationClient<TBuilder>(this TBuilder builder, Uri endpoint)
+ where TBuilder : IAzureClientFactoryBuilderWithCredential
+ {
+ return builder.RegisterClientFactory<FaceAdministrationClient, AzureAIVisionFaceClientOptions>((options, cred) => new FaceAdministrationClient(endpoint, cred, options));
+ }
+
/// <summary> Registers a <see cref="FaceClient"/> instance. </summary>
/// <param name="builder"> The builder to register with. </param>
/// <param name="endpoint">
@@ -65,6 +90,14 @@ public static IAzureClientBuilder<FaceSessionClient, AzureAIVisionFaceClientOptions> AddFaceSessionClient<TBuilder>(this TBuilder builder, Uri endpoint)
return builder.RegisterClientFactory<FaceSessionClient, AzureAIVisionFaceClientOptions>((options, cred) => new FaceSessionClient(endpoint, cred, options));
}
+ /// <summary> Registers a <see cref="FaceAdministrationClient"/> instance. </summary>
+ /// <param name="builder"> The builder to register with. </param>
+ /// <param name="configuration"> The configuration values. </param>
+ public static IAzureClientBuilder<FaceAdministrationClient, AzureAIVisionFaceClientOptions> AddFaceAdministrationClient<TBuilder, TConfiguration>(this TBuilder builder, TConfiguration configuration)
+ where TBuilder : IAzureClientFactoryBuilderWithConfiguration<TConfiguration>
+ {
+ return builder.RegisterClientFactory<FaceAdministrationClient, AzureAIVisionFaceClientOptions>(configuration);
+ }
/// <summary> Registers a <see cref="FaceClient"/> instance. </summary>
/// <param name="builder"> The builder to register with. </param>
/// <param name="configuration"> The configuration values. </param>
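For context, a minimal sketch of how the new registration extensions are consumed through Microsoft.Extensions.Azure; the service-collection setup below is illustrative and not part of this diff:

```csharp
using System;
using Azure;
using Azure.AI.Vision.Face;
using Microsoft.Extensions.Azure;
using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();

// Register the new administration client; endpoint and key are placeholders.
services.AddAzureClients(clients =>
{
    clients.AddFaceAdministrationClient(
        new Uri("https://{resource-name}.cognitiveservices.azure.com"),
        new AzureKeyCredential("<api-key>"));
});

// The registered client can then be resolved from DI as usual.
using var provider = services.BuildServiceProvider();
var adminClient = provider.GetRequiredService<FaceAdministrationClient>();
```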
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs
index f85eca044688..49e4e7fad35a 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs
@@ -14,6 +14,93 @@ namespace Azure.AI.Vision.Face
/// <summary> Model factory for models. </summary>
public static partial class AIVisionFaceModelFactory
{
+ /// <summary> Initializes a new instance of <see cref="Face.LargePersonGroup"/>. </summary>
+ /// <param name="name"> User defined name, maximum length is 128. </param>
+ /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+ /// <param name="recognitionModel"> Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. </param>
+ /// <param name="largePersonGroupId"> ID of the container. </param>
+ /// <returns> A new <see cref="Face.LargePersonGroup"/> instance for mocking. </returns>
+ public static LargePersonGroup LargePersonGroup(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largePersonGroupId = null)
+ {
+ return new LargePersonGroup(name, userData, recognitionModel, largePersonGroupId, serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.FaceTrainingResult"/>. </summary>
+ /// <param name="status"> Training status of the container. </param>
+ /// <param name="createdDateTime"> A combined UTC date and time string that describes the created time of the person group, large person group or large face list. </param>
+ /// <param name="lastActionDateTime"> A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. </param>
+ /// <param name="lastSuccessfulTrainingDateTime"> A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. </param>
+ /// <param name="message"> Show failure message when training failed (omitted when training succeed). </param>
+ /// <returns> A new <see cref="Face.FaceTrainingResult"/> instance for mocking. </returns>
+ public static FaceTrainingResult FaceTrainingResult(FaceOperationStatus status = default, DateTimeOffset createdDateTime = default, DateTimeOffset lastActionDateTime = default, DateTimeOffset lastSuccessfulTrainingDateTime = default, string message = null)
+ {
+ return new FaceTrainingResult(
+ status,
+ createdDateTime,
+ lastActionDateTime,
+ lastSuccessfulTrainingDateTime,
+ message,
+ serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.CreatePersonResult"/>. </summary>
+ /// <param name="personId"> Person ID of the person. </param>
+ /// <returns> A new <see cref="Face.CreatePersonResult"/> instance for mocking. </returns>
+ public static CreatePersonResult CreatePersonResult(Guid personId = default)
+ {
+ return new CreatePersonResult(personId, serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.LargePersonGroupPerson"/>. </summary>
+ /// <param name="personId"> ID of the person. </param>
+ /// <param name="name"> User defined name, maximum length is 128. </param>
+ /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+ /// <param name="persistedFaceIds"> Face ids of registered faces in the person. </param>
+ /// <returns> A new <see cref="Face.LargePersonGroupPerson"/> instance for mocking. </returns>
+ public static LargePersonGroupPerson LargePersonGroupPerson(Guid personId = default, string name = null, string userData = null, IEnumerable<Guid> persistedFaceIds = null)
+ {
+ persistedFaceIds ??= new List<Guid>();
+
+ return new LargePersonGroupPerson(personId, name, userData, persistedFaceIds?.ToList(), serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.AddFaceResult"/>. </summary>
+ /// <param name="persistedFaceId"> Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. </param>
+ /// <returns> A new <see cref="Face.AddFaceResult"/> instance for mocking. </returns>
+ public static AddFaceResult AddFaceResult(Guid persistedFaceId = default)
+ {
+ return new AddFaceResult(persistedFaceId, serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.LargePersonGroupPersonFace"/>. </summary>
+ /// <param name="persistedFaceId"> Face ID of the face. </param>
+ /// <param name="userData"> User-provided data attached to the face. The length limit is 1K. </param>
+ /// <returns> A new <see cref="Face.LargePersonGroupPersonFace"/> instance for mocking. </returns>
+ public static LargePersonGroupPersonFace LargePersonGroupPersonFace(Guid persistedFaceId = default, string userData = null)
+ {
+ return new LargePersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.LargeFaceList"/>. </summary>
+ /// <param name="name"> User defined name, maximum length is 128. </param>
+ /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+ /// <param name="recognitionModel"> Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. </param>
+ /// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
+ /// <returns> A new <see cref="Face.LargeFaceList"/> instance for mocking. </returns>
+ public static LargeFaceList LargeFaceList(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largeFaceListId = null)
+ {
+ return new LargeFaceList(name, userData, recognitionModel, largeFaceListId, serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.LargeFaceListFace"/>. </summary>
+ /// <param name="persistedFaceId"> Face ID of the face. </param>
+ /// <param name="userData"> User-provided data attached to the face. The length limit is 1K. </param>
+ /// <returns> A new <see cref="Face.LargeFaceListFace"/> instance for mocking. </returns>
+ public static LargeFaceListFace LargeFaceListFace(Guid persistedFaceId = default, string userData = null)
+ {
+ return new LargeFaceListFace(persistedFaceId, userData, serializedAdditionalRawData: null);
+ }
+
/// <summary> Initializes a new instance of <see cref="Face.FaceDetectionResult"/>. </summary>
/// <param name="faceId"> Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true. </param>
@@ -277,19 +364,43 @@ public static FaceGroupingResult FaceGroupingResult(IEnumerable<IReadOnlyList<Guid>> gro
return new FaceGroupingResult(groups?.ToList(), messyGroup?.ToList(), serializedAdditionalRawData: null);
}
+ /// <summary> Initializes a new instance of <see cref="Face.FaceIdentificationResult"/>. </summary>
+ /// <param name="faceId"> faceId of the query face. </param>
+ /// <param name="candidates"> Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. </param>
+ /// <returns> A new <see cref="Face.FaceIdentificationResult"/> instance for mocking. </returns>
+ public static FaceIdentificationResult FaceIdentificationResult(Guid faceId = default, IEnumerable<FaceIdentificationCandidate> candidates = null)
+ {
+ candidates ??= new List<FaceIdentificationCandidate>();
+
+ return new FaceIdentificationResult(faceId, candidates?.ToList(), serializedAdditionalRawData: null);
+ }
+
+ /// <summary> Initializes a new instance of <see cref="Face.FaceIdentificationCandidate"/>. </summary>
+ /// <param name="personId"> personId of candidate person. </param>
+ /// <param name="confidence"> Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. </param>
+ /// <returns> A new <see cref="Face.FaceIdentificationCandidate"/> instance for mocking. </returns>
+ public static FaceIdentificationCandidate FaceIdentificationCandidate(Guid personId = default, float confidence = default)
+ {
+ return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData: null);
+ }
+
/// <summary> Initializes a new instance of <see cref="Face.CreateLivenessSessionContent"/>. </summary>
/// <param name="livenessOperationMode"> Type of liveness mode the client should follow. </param>
/// <param name="sendResultsToClient"> Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. </param>
/// <param name="deviceCorrelationIdSetInClient"> Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. </param>
+ /// <param name="enableSessionImage"> Whether or not store the session image. </param>
+ /// <param name="livenessSingleModalModel"> The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. </param>
/// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
/// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
/// <returns> A new <see cref="Face.CreateLivenessSessionContent"/> instance for mocking. </returns>
- public static CreateLivenessSessionContent CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null)
+ public static CreateLivenessSessionContent CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, bool? enableSessionImage = null, LivenessModel? livenessSingleModalModel = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null)
{
return new CreateLivenessSessionContent(
livenessOperationMode,
sendResultsToClient,
deviceCorrelationIdSetInClient,
+ enableSessionImage,
+ livenessSingleModalModel,
deviceCorrelationId,
authTokenTimeToLiveInSeconds,
serializedAdditionalRawData: null);
@@ -337,8 +448,10 @@ public static LivenessSession LivenessSession(string id = null, DateTimeOffset c
/// <param name="request"> The request of this entry. </param>
/// <param name="response"> The response of this entry. </param>
/// <param name="digest"> The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. </param>
+ /// <param name="sessionImageId"> The image ID of the session request. </param>
+ /// <param name="verifyImageHash"> The sha256 hash of the verify-image in the request. </param>
/// <returns> A new <see cref="Face.LivenessSessionAuditEntry"/> instance for mocking. </returns>
- public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = default, string sessionId = null, string requestId = null, string clientRequestId = null, DateTimeOffset receivedDateTime = default, AuditRequestInfo request = null, AuditLivenessResponseInfo response = null, string digest = null)
+ public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = default, string sessionId = null, string requestId = null, string clientRequestId = null, DateTimeOffset receivedDateTime = default, AuditRequestInfo request = null, AuditLivenessResponseInfo response = null, string digest = null, string sessionImageId = null, string verifyImageHash = null)
{
return new LivenessSessionAuditEntry(
id,
@@ -349,6 +462,8 @@ public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = defa
request,
response,
digest,
+ sessionImageId,
+ verifyImageHash,
serializedAdditionalRawData: null);
}
@@ -444,6 +559,32 @@ public static LivenessSessionItem LivenessSessionItem(string id = null, DateTime
serializedAdditionalRawData: null);
}
+ /// <summary> Initializes a new instance of <see cref="Face.CreateLivenessWithVerifySessionContent"/>. </summary>
+ /// <param name="livenessOperationMode"> Type of liveness mode the client should follow. </param>
+ /// <param name="sendResultsToClient"> Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. </param>
+ /// <param name="deviceCorrelationIdSetInClient"> Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. </param>
+ /// <param name="enableSessionImage"> Whether or not store the session image. </param>
+ /// <param name="livenessSingleModalModel"> The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. </param>
+ /// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
+ /// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
+ /// <param name="returnVerifyImageHash"> Whether or not return the verify image hash. </param>
+ /// <param name="verifyConfidenceThreshold"> Threshold for confidence of the face verification. </param>
+ /// <returns> A new <see cref="Face.CreateLivenessWithVerifySessionContent"/> instance for mocking. </returns>
+ public static CreateLivenessWithVerifySessionContent CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, bool? enableSessionImage = null, LivenessModel? livenessSingleModalModel = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null, bool? returnVerifyImageHash = null, float? verifyConfidenceThreshold = null)
+ {
+ return new CreateLivenessWithVerifySessionContent(
+ livenessOperationMode,
+ sendResultsToClient,
+ deviceCorrelationIdSetInClient,
+ enableSessionImage,
+ livenessSingleModalModel,
+ deviceCorrelationId,
+ authTokenTimeToLiveInSeconds,
+ returnVerifyImageHash,
+ verifyConfidenceThreshold,
+ serializedAdditionalRawData: null);
+ }
+
/// <summary> Initializes a new instance of <see cref="Face.CreateLivenessWithVerifySessionResult"/>. </summary>
/// <param name="sessionId"> The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. </param>
/// <param name="authToken"> Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. </param>
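These factory methods exist so callers can fabricate output models that only have internal constructors. A short sketch of typical test usage, assuming the FaceOperationStatus values shown (the test data is illustrative):

```csharp
using System;
using Azure.AI.Vision.Face;

// Fabricate canned results for a mocked client in unit tests.
var trainingResult = AIVisionFaceModelFactory.FaceTrainingResult(
    status: FaceOperationStatus.Succeeded,
    createdDateTime: DateTimeOffset.UtcNow.AddMinutes(-5),
    lastActionDateTime: DateTimeOffset.UtcNow,
    lastSuccessfulTrainingDateTime: DateTimeOffset.UtcNow);

var person = AIVisionFaceModelFactory.LargePersonGroupPerson(
    personId: Guid.NewGuid(),
    name: "Ada",
    persistedFaceIds: new[] { Guid.NewGuid() });
```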
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs
new file mode 100644
index 000000000000..4068e6fba19a
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs
@@ -0,0 +1,135 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class AddFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel<AddFaceFromUrlRequest>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<AddFaceFromUrlRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<AddFaceFromUrlRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("url"u8);
+ writer.WriteStringValue(Uri.AbsoluteUri);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ AddFaceFromUrlRequest IJsonModel<AddFaceFromUrlRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeAddFaceFromUrlRequest(document.RootElement, options);
+ }
+
+ internal static AddFaceFromUrlRequest DeserializeAddFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ Uri url = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("url"u8))
+ {
+ url = new Uri(property.Value.GetString());
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new AddFaceFromUrlRequest(url, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<AddFaceFromUrlRequest>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ AddFaceFromUrlRequest IPersistableModel<AddFaceFromUrlRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeAddFaceFromUrlRequest(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<AddFaceFromUrlRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// <summary> Deserializes the model from a raw response. </summary>
+ /// <param name="response"> The response to deserialize the model from. </param>
+ internal static AddFaceFromUrlRequest FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeAddFaceFromUrlRequest(document.RootElement);
+ }
+
+ /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs
new file mode 100644
index 000000000000..f6d64f1102d7
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// <summary> The AddFaceFromUrlRequest. </summary>
+ internal partial class AddFaceFromUrlRequest
+ {
+ /// <summary>
+ /// Keeps track of any properties unknown to the library.
+ /// <para>
+ /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+ /// </para>
+ /// <para>
+ /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+ /// </para>
+ /// <para>
+ /// Examples:
+ /// <list type="bullet">
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson("foo")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("\"foo\"")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// </list>
+ /// </para>
+ /// </summary>
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceFromUrlRequest"/>. </summary>
+ /// <param name="uri"> URL of input image. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="uri"/> is null. </exception>
+ internal AddFaceFromUrlRequest(Uri uri)
+ {
+ Argument.AssertNotNull(uri, nameof(uri));
+
+ Uri = uri;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceFromUrlRequest"/>. </summary>
+ /// <param name="uri"> URL of input image. </param>
+ /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+ internal AddFaceFromUrlRequest(Uri uri, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Uri = uri;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceFromUrlRequest"/> for deserialization. </summary>
+ internal AddFaceFromUrlRequest()
+ {
+ }
+
+ /// <summary> URL of input image. </summary>
+ public Uri Uri { get; }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs
new file mode 100644
index 000000000000..6afe63d8fb43
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs
@@ -0,0 +1,135 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class AddFaceFromUrlRequest1 : IUtf8JsonSerializable, IJsonModel<AddFaceFromUrlRequest1>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<AddFaceFromUrlRequest1>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<AddFaceFromUrlRequest1>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest1>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("url"u8);
+ writer.WriteStringValue(Uri.AbsoluteUri);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ AddFaceFromUrlRequest1 IJsonModel<AddFaceFromUrlRequest1>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest1>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeAddFaceFromUrlRequest1(document.RootElement, options);
+ }
+
+ internal static AddFaceFromUrlRequest1 DeserializeAddFaceFromUrlRequest1(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ Uri url = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("url"u8))
+ {
+ url = new Uri(property.Value.GetString());
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new AddFaceFromUrlRequest1(url, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<AddFaceFromUrlRequest1>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest1>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ AddFaceFromUrlRequest1 IPersistableModel<AddFaceFromUrlRequest1>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceFromUrlRequest1>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeAddFaceFromUrlRequest1(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<AddFaceFromUrlRequest1>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// <summary> Deserializes the model from a raw response. </summary>
+ /// <param name="response"> The response to deserialize the model from. </param>
+ internal static AddFaceFromUrlRequest1 FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeAddFaceFromUrlRequest1(document.RootElement);
+ }
+
+ /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs
new file mode 100644
index 000000000000..2bce74bdbf08
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// <summary> The AddFaceFromUrlRequest1. </summary>
+ internal partial class AddFaceFromUrlRequest1
+ {
+ /// <summary>
+ /// Keeps track of any properties unknown to the library.
+ /// <para>
+ /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+ /// </para>
+ /// <para>
+ /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+ /// </para>
+ /// <para>
+ /// Examples:
+ /// <list type="bullet">
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson("foo")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("\"foo\"")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// </list>
+ /// </para>
+ /// </summary>
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceFromUrlRequest1"/>. </summary>
+ /// <param name="uri"> URL of input image. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="uri"/> is null. </exception>
+ internal AddFaceFromUrlRequest1(Uri uri)
+ {
+ Argument.AssertNotNull(uri, nameof(uri));
+
+ Uri = uri;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceFromUrlRequest1"/>. </summary>
+ /// <param name="uri"> URL of input image. </param>
+ /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+ internal AddFaceFromUrlRequest1(Uri uri, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Uri = uri;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceFromUrlRequest1"/> for deserialization. </summary>
+ internal AddFaceFromUrlRequest1()
+ {
+ }
+
+ /// <summary> URL of input image. </summary>
+ public Uri Uri { get; }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs
new file mode 100644
index 000000000000..716408b25a91
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs
@@ -0,0 +1,135 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ public partial class AddFaceResult : IUtf8JsonSerializable, IJsonModel<AddFaceResult>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<AddFaceResult>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<AddFaceResult>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("persistedFaceId"u8);
+ writer.WriteStringValue(PersistedFaceId);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ AddFaceResult IJsonModel<AddFaceResult>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeAddFaceResult(document.RootElement, options);
+ }
+
+ internal static AddFaceResult DeserializeAddFaceResult(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ Guid persistedFaceId = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("persistedFaceId"u8))
+ {
+ persistedFaceId = property.Value.GetGuid();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new AddFaceResult(persistedFaceId, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<AddFaceResult>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ AddFaceResult IPersistableModel<AddFaceResult>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<AddFaceResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeAddFaceResult(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<AddFaceResult>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// <summary> Deserializes the model from a raw response. </summary>
+ /// <param name="response"> The response to deserialize the model from. </param>
+ internal static AddFaceResult FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeAddFaceResult(document.RootElement);
+ }
+
+ /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs
new file mode 100644
index 000000000000..28ad972fc9d5
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// <summary> Response body for adding face. </summary>
+ public partial class AddFaceResult
+ {
+ /// <summary>
+ /// Keeps track of any properties unknown to the library.
+ /// <para>
+ /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+ /// </para>
+ /// <para>
+ /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+ /// </para>
+ /// <para>
+ /// Examples:
+ /// <list type="bullet">
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson("foo")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("\"foo\"")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// </list>
+ /// </para>
+ /// </summary>
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceResult"/>. </summary>
+ /// <param name="persistedFaceId"> Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. </param>
+ internal AddFaceResult(Guid persistedFaceId)
+ {
+ PersistedFaceId = persistedFaceId;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceResult"/>. </summary>
+ /// <param name="persistedFaceId"> Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. </param>
+ /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+ internal AddFaceResult(Guid persistedFaceId, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ PersistedFaceId = persistedFaceId;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="AddFaceResult"/> for deserialization. </summary>
+ internal AddFaceResult()
+ {
+ }
+
+ /// <summary> Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. </summary>
+ public Guid PersistedFaceId { get; }
+ }
+}
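Because AddFaceResult implements IPersistableModel<T> with the JSON ("J") wire format shown above, it round-trips through ModelReaderWriter; a small sketch (the payload is illustrative):

```csharp
using System;
using System.ClientModel.Primitives;
using Azure.AI.Vision.Face;

// Read a service-shaped JSON payload back into the model type.
BinaryData json = BinaryData.FromString(
    $"{{\"persistedFaceId\": \"{Guid.NewGuid()}\"}}");
AddFaceResult result = ModelReaderWriter.Read<AddFaceResult>(json);
Console.WriteLine(result.PersistedFaceId);
```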
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs
index a8cf7ed227cc..a27f25f2a008 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs
@@ -13,13 +13,15 @@ namespace Azure.AI.Vision.Face
/// <summary> Client options for Azure.AI.Vision.Face library clients. </summary>
public partial class AzureAIVisionFaceClientOptions : ClientOptions
{
- private const ServiceVersion LatestVersion = ServiceVersion.V1_1_Preview_1;
+ private const ServiceVersion LatestVersion = ServiceVersion.V1_2_Preview_1;
/// <summary> The version of the service to use. </summary>
public enum ServiceVersion
{
/// <summary> Service version "v1.1-preview.1". </summary>
V1_1_Preview_1 = 1,
+ /// <summary> Service version "v1.2-preview.1". </summary>
+ V1_2_Preview_1 = 2,
}
internal string Version { get; }
@@ -30,6 +32,7 @@ public AzureAIVisionFaceClientOptions(ServiceVersion version = LatestVersion)
Version = version switch
{
ServiceVersion.V1_1_Preview_1 => "v1.1-preview.1",
+ ServiceVersion.V1_2_Preview_1 => "v1.2-preview.1",
_ => throw new NotSupportedException()
};
}
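Callers opt into the new service surface through the options constructor; a minimal sketch (endpoint and key are placeholders):

```csharp
using System;
using Azure;
using Azure.AI.Vision.Face;

// Pin the client to v1.2-preview.1 explicitly; LatestVersion now
// resolves to V1_2_Preview_1 by default.
var options = new AzureAIVisionFaceClientOptions(
    AzureAIVisionFaceClientOptions.ServiceVersion.V1_2_Preview_1);

var sessionClient = new FaceSessionClient(
    new Uri("https://{resource-name}.cognitiveservices.azure.com"),
    new AzureKeyCredential("<api-key>"),
    options);
```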
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs
index 5890a4f74258..b32ad1822811 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs
@@ -38,6 +38,16 @@ void IJsonModel<CreateLivenessSessionContent>.Write(Utf8JsonWriter writer, Model
writer.WritePropertyName("deviceCorrelationIdSetInClient"u8);
writer.WriteBooleanValue(DeviceCorrelationIdSetInClient.Value);
}
+ if (Optional.IsDefined(EnableSessionImage))
+ {
+ writer.WritePropertyName("enableSessionImage"u8);
+ writer.WriteBooleanValue(EnableSessionImage.Value);
+ }
+ if (Optional.IsDefined(LivenessSingleModalModel))
+ {
+ writer.WritePropertyName("livenessSingleModalModel"u8);
+ writer.WriteStringValue(LivenessSingleModalModel.Value.ToString());
+ }
if (Optional.IsDefined(DeviceCorrelationId))
{
writer.WritePropertyName("deviceCorrelationId"u8);
@@ -89,6 +99,8 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon
LivenessOperationMode livenessOperationMode = default;
bool? sendResultsToClient = default;
bool? deviceCorrelationIdSetInClient = default;
+ bool? enableSessionImage = default;
+ LivenessModel? livenessSingleModalModel = default;
string deviceCorrelationId = default;
int? authTokenTimeToLiveInSeconds = default;
IDictionary<string, BinaryData> serializedAdditionalRawData = default;
@@ -118,6 +130,24 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon
deviceCorrelationIdSetInClient = property.Value.GetBoolean();
continue;
}
+ if (property.NameEquals("enableSessionImage"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ enableSessionImage = property.Value.GetBoolean();
+ continue;
+ }
+ if (property.NameEquals("livenessSingleModalModel"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ livenessSingleModalModel = new LivenessModel(property.Value.GetString());
+ continue;
+ }
if (property.NameEquals("deviceCorrelationId"u8))
{
deviceCorrelationId = property.Value.GetString();
@@ -142,6 +172,8 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon
livenessOperationMode,
sendResultsToClient,
deviceCorrelationIdSetInClient,
+ enableSessionImage,
+ livenessSingleModalModel,
deviceCorrelationId,
authTokenTimeToLiveInSeconds,
serializedAdditionalRawData);
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs
index eb27333cbe93..9de38ee841c6 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs
@@ -10,7 +10,7 @@
namespace Azure.AI.Vision.Face
{
- /// <summary> Request for creating liveness session. </summary>
+ /// <summary> Request model for creating liveness session. </summary>
public partial class CreateLivenessSessionContent
{
///
@@ -56,14 +56,18 @@ public CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode)
/// <param name="livenessOperationMode"> Type of liveness mode the client should follow. </param>
/// <param name="sendResultsToClient"> Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. </param>
/// <param name="deviceCorrelationIdSetInClient"> Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. </param>
+ /// <param name="enableSessionImage"> Whether or not store the session image. </param>
+ /// <param name="livenessSingleModalModel"> The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. </param>
/// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
/// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
/// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
- internal CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ internal CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, bool? enableSessionImage, LivenessModel? livenessSingleModalModel, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary<string, BinaryData> serializedAdditionalRawData)
{
LivenessOperationMode = livenessOperationMode;
SendResultsToClient = sendResultsToClient;
DeviceCorrelationIdSetInClient = deviceCorrelationIdSetInClient;
+ EnableSessionImage = enableSessionImage;
+ LivenessSingleModalModel = livenessSingleModalModel;
DeviceCorrelationId = deviceCorrelationId;
AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds;
_serializedAdditionalRawData = serializedAdditionalRawData;
@@ -80,6 +84,10 @@ internal CreateLivenessSessionContent()
public bool? SendResultsToClient { get; set; }
/// <summary> Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. </summary>
public bool? DeviceCorrelationIdSetInClient { get; set; }
+ /// <summary> Whether or not store the session image. </summary>
+ public bool? EnableSessionImage { get; set; }
+ /// <summary> The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. </summary>
+ public LivenessModel? LivenessSingleModalModel { get; set; }
/// <summary> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </summary>
public string DeviceCorrelationId { get; set; }
/// <summary> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </summary>
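A sketch of setting the two new knobs when creating a session, assuming the existing CreateLivenessSessionAsync convenience method on FaceSessionClient (values are illustrative):

```csharp
using System;
using System.Threading.Tasks;
using Azure.AI.Vision.Face;

static async Task<string> CreateSessionAsync(FaceSessionClient sessionClient)
{
    var content = new CreateLivenessSessionContent(LivenessOperationMode.Passive)
    {
        DeviceCorrelationId = Guid.NewGuid().ToString(),
        EnableSessionImage = true, // new optional property in v1.2-preview.1
        AuthTokenTimeToLiveInSeconds = 600,
    };

    var session = await sessionClient.CreateLivenessSessionAsync(content);
    return session.Value.SessionId;
}
```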
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs
index 6b713f68b180..443812ef9cf0 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs
@@ -8,13 +8,12 @@
using System;
using System.ClientModel.Primitives;
using System.Collections.Generic;
-using System.IO;
using System.Text.Json;
using Azure.Core;
namespace Azure.AI.Vision.Face
{
- internal partial class CreateLivenessWithVerifySessionContent : IUtf8JsonSerializable, IJsonModel<CreateLivenessWithVerifySessionContent>
+ public partial class CreateLivenessWithVerifySessionContent : IUtf8JsonSerializable, IJsonModel<CreateLivenessWithVerifySessionContent>
{
void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions);
@@ -27,17 +26,48 @@ void IJsonModel<CreateLivenessWithVerifySessionContent>.Write(Utf8JsonWriter wri
}
writer.WriteStartObject();
- writer.WritePropertyName("Parameters"u8);
- writer.WriteObjectValue(Parameters, options);
- writer.WritePropertyName("VerifyImage"u8);
-#if NET6_0_OR_GREATER
- writer.WriteRawValue(global::System.BinaryData.FromStream(VerifyImage));
-#else
- using (JsonDocument document = JsonDocument.Parse(BinaryData.FromStream(VerifyImage)))
+ writer.WritePropertyName("livenessOperationMode"u8);
+ writer.WriteStringValue(LivenessOperationMode.ToString());
+ if (Optional.IsDefined(SendResultsToClient))
{
- JsonSerializer.Serialize(writer, document.RootElement);
+ writer.WritePropertyName("sendResultsToClient"u8);
+ writer.WriteBooleanValue(SendResultsToClient.Value);
+ }
+ if (Optional.IsDefined(DeviceCorrelationIdSetInClient))
+ {
+ writer.WritePropertyName("deviceCorrelationIdSetInClient"u8);
+ writer.WriteBooleanValue(DeviceCorrelationIdSetInClient.Value);
+ }
+ if (Optional.IsDefined(EnableSessionImage))
+ {
+ writer.WritePropertyName("enableSessionImage"u8);
+ writer.WriteBooleanValue(EnableSessionImage.Value);
+ }
+ if (Optional.IsDefined(LivenessSingleModalModel))
+ {
+ writer.WritePropertyName("livenessSingleModalModel"u8);
+ writer.WriteStringValue(LivenessSingleModalModel.Value.ToString());
+ }
+ if (Optional.IsDefined(DeviceCorrelationId))
+ {
+ writer.WritePropertyName("deviceCorrelationId"u8);
+ writer.WriteStringValue(DeviceCorrelationId);
+ }
+ if (Optional.IsDefined(AuthTokenTimeToLiveInSeconds))
+ {
+ writer.WritePropertyName("authTokenTimeToLiveInSeconds"u8);
+ writer.WriteNumberValue(AuthTokenTimeToLiveInSeconds.Value);
+ }
+ if (Optional.IsDefined(ReturnVerifyImageHash))
+ {
+ writer.WritePropertyName("returnVerifyImageHash"u8);
+ writer.WriteBooleanValue(ReturnVerifyImageHash.Value);
+ }
+ if (Optional.IsDefined(VerifyConfidenceThreshold))
+ {
+ writer.WritePropertyName("verifyConfidenceThreshold"u8);
+ writer.WriteNumberValue(VerifyConfidenceThreshold.Value);
}
-#endif
if (options.Format != "W" && _serializedAdditionalRawData != null)
{
foreach (var item in _serializedAdditionalRawData)
@@ -76,20 +106,90 @@ internal static CreateLivenessWithVerifySessionContent DeserializeCreateLiveness
{
return null;
}
- CreateLivenessSessionContent parameters = default;
- Stream verifyImage = default;
+ LivenessOperationMode livenessOperationMode = default;
+ bool? sendResultsToClient = default;
+ bool? deviceCorrelationIdSetInClient = default;
+ bool? enableSessionImage = default;
+ LivenessModel? livenessSingleModalModel = default;
+ string deviceCorrelationId = default;
+ int? authTokenTimeToLiveInSeconds = default;
+ bool? returnVerifyImageHash = default;
+ float? verifyConfidenceThreshold = default;
IDictionary<string, BinaryData> serializedAdditionalRawData = default;
Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
foreach (var property in element.EnumerateObject())
{
- if (property.NameEquals("Parameters"u8))
+ if (property.NameEquals("livenessOperationMode"u8))
+ {
+ livenessOperationMode = new LivenessOperationMode(property.Value.GetString());
+ continue;
+ }
+ if (property.NameEquals("sendResultsToClient"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ sendResultsToClient = property.Value.GetBoolean();
+ continue;
+ }
+ if (property.NameEquals("deviceCorrelationIdSetInClient"u8))
{
- parameters = CreateLivenessSessionContent.DeserializeCreateLivenessSessionContent(property.Value, options);
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ deviceCorrelationIdSetInClient = property.Value.GetBoolean();
continue;
}
- if (property.NameEquals("VerifyImage"u8))
+ if (property.NameEquals("enableSessionImage"u8))
{
- verifyImage = BinaryData.FromString(property.Value.GetRawText()).ToStream();
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ enableSessionImage = property.Value.GetBoolean();
+ continue;
+ }
+ if (property.NameEquals("livenessSingleModalModel"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ livenessSingleModalModel = new LivenessModel(property.Value.GetString());
+ continue;
+ }
+ if (property.NameEquals("deviceCorrelationId"u8))
+ {
+ deviceCorrelationId = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("authTokenTimeToLiveInSeconds"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ authTokenTimeToLiveInSeconds = property.Value.GetInt32();
+ continue;
+ }
+ if (property.NameEquals("returnVerifyImageHash"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ returnVerifyImageHash = property.Value.GetBoolean();
+ continue;
+ }
+ if (property.NameEquals("verifyConfidenceThreshold"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ verifyConfidenceThreshold = property.Value.GetSingle();
continue;
}
if (options.Format != "W")
@@ -98,30 +198,17 @@ internal static CreateLivenessWithVerifySessionContent DeserializeCreateLiveness
}
}
serializedAdditionalRawData = rawDataDictionary;
- return new CreateLivenessWithVerifySessionContent(parameters, verifyImage, serializedAdditionalRawData);
- }
-
- private BinaryData SerializeMultipart(ModelReaderWriterOptions options)
- {
- using MultipartFormDataRequestContent content = ToMultipartRequestContent();
- using MemoryStream stream = new MemoryStream();
- content.WriteTo(stream);
- if (stream.Position > int.MaxValue)
- {
- return BinaryData.FromStream(stream);
- }
- else
- {
- return new BinaryData(stream.GetBuffer().AsMemory(0, (int)stream.Position));
- }
- }
-
- internal virtual MultipartFormDataRequestContent ToMultipartRequestContent()
- {
- MultipartFormDataRequestContent content = new MultipartFormDataRequestContent();
- content.Add(ModelReaderWriter.Write(Parameters, ModelSerializationExtensions.WireOptions), "Parameters");
- content.Add(VerifyImage, "VerifyImage", "VerifyImage", "application/octet-stream");
- return content;
+ return new CreateLivenessWithVerifySessionContent(
+ livenessOperationMode,
+ sendResultsToClient,
+ deviceCorrelationIdSetInClient,
+ enableSessionImage,
+ livenessSingleModalModel,
+ deviceCorrelationId,
+ authTokenTimeToLiveInSeconds,
+ returnVerifyImageHash,
+ verifyConfidenceThreshold,
+ serializedAdditionalRawData);
}
BinaryData IPersistableModel<CreateLivenessWithVerifySessionContent>.Write(ModelReaderWriterOptions options)
@@ -132,8 +219,6 @@ BinaryData IPersistableModel<CreateLivenessWithVerifySessionContent>.Write(Model
{
case "J":
return ModelReaderWriter.Write(this, options);
- case "MFD":
- return SerializeMultipart(options);
default:
throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionContent)} does not support writing '{options.Format}' format.");
}
@@ -155,7 +240,7 @@ CreateLivenessWithVerifySessionContent IPersistableModel<CreateLivenessWithVerifySessionContent>.Create(BinaryData data, ModelReaderWriterOptions options)
- string IPersistableModel<CreateLivenessWithVerifySessionContent>.GetFormatFromOptions(ModelReaderWriterOptions options) => "MFD";
+ string IPersistableModel<CreateLivenessWithVerifySessionContent>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
/// <summary> Deserializes the model from a raw response. </summary>
/// <param name="response"> The response to deserialize the model from. </param>
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs
index 5d6d724fdbda..99e890eaf7e3 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs
@@ -7,12 +7,11 @@
using System;
using System.Collections.Generic;
-using System.IO;
namespace Azure.AI.Vision.Face
{
- /// <summary> Request of liveness with verify session creation. </summary>
- internal partial class CreateLivenessWithVerifySessionContent
+ /// <summary> Request for creating liveness with verify session. </summary>
+ public partial class CreateLivenessWithVerifySessionContent
{
///
/// Keeps track of any properties unknown to the library.
@@ -47,26 +46,34 @@ internal partial class CreateLivenessWithVerifySessionContent
private IDictionary<string, BinaryData> _serializedAdditionalRawData;
/// <summary> Initializes a new instance of <see cref="CreateLivenessWithVerifySessionContent"/>. </summary>
- /// <param name="parameters"> The parameters for creating session. </param>
- /// <param name="verifyImage"> The image stream for verify. Content-Disposition header field for this part must have filename. </param>
- /// <exception cref="ArgumentNullException"> <paramref name="parameters"/> or <paramref name="verifyImage"/> is null. </exception>
- public CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage)
+ /// <param name="livenessOperationMode"> Type of liveness mode the client should follow. </param>
+ public CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode)
{
- Argument.AssertNotNull(parameters, nameof(parameters));
- Argument.AssertNotNull(verifyImage, nameof(verifyImage));
-
- Parameters = parameters;
- VerifyImage = verifyImage;
+ LivenessOperationMode = livenessOperationMode;
}
/// <summary> Initializes a new instance of <see cref="CreateLivenessWithVerifySessionContent"/>. </summary>
- /// <param name="parameters"> The parameters for creating session. </param>
- /// <param name="verifyImage"> The image stream for verify. Content-Disposition header field for this part must have filename. </param>
+ /// <param name="livenessOperationMode"> Type of liveness mode the client should follow. </param>
+ /// <param name="sendResultsToClient"> Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. </param>
+ /// <param name="deviceCorrelationIdSetInClient"> Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. </param>
+ /// <param name="enableSessionImage"> Whether or not store the session image. </param>
+ /// <param name="livenessSingleModalModel"> The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. </param>
+ /// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
+ /// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
+ /// <param name="returnVerifyImageHash"> Whether or not return the verify image hash. </param>
+ /// <param name="verifyConfidenceThreshold"> Threshold for confidence of the face verification. </param>
/// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
- internal CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ internal CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, bool? enableSessionImage, LivenessModel? livenessSingleModalModel, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, bool? returnVerifyImageHash, float? verifyConfidenceThreshold, IDictionary<string, BinaryData> serializedAdditionalRawData)
{
- Parameters = parameters;
- VerifyImage = verifyImage;
+ LivenessOperationMode = livenessOperationMode;
+ SendResultsToClient = sendResultsToClient;
+ DeviceCorrelationIdSetInClient = deviceCorrelationIdSetInClient;
+ EnableSessionImage = enableSessionImage;
+ LivenessSingleModalModel = livenessSingleModalModel;
+ DeviceCorrelationId = deviceCorrelationId;
+ AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds;
+ ReturnVerifyImageHash = returnVerifyImageHash;
+ VerifyConfidenceThreshold = verifyConfidenceThreshold;
_serializedAdditionalRawData = serializedAdditionalRawData;
}
@@ -75,9 +82,23 @@ internal CreateLivenessWithVerifySessionContent()
{
}
- /// The parameters for creating session.
- public CreateLivenessSessionContent Parameters { get; }
- /// The image stream for verify. Content-Disposition header field for this part must have filename.
- public Stream VerifyImage { get; }
+ /// Type of liveness mode the client should follow.
+ public LivenessOperationMode LivenessOperationMode { get; }
+ /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented.
+ public bool? SendResultsToClient { get; set; }
+ /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body.
+ public bool? DeviceCorrelationIdSetInClient { get; set; }
+ /// Whether or not store the session image.
+ public bool? EnableSessionImage { get; set; }
+ /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen.
+ public LivenessModel? LivenessSingleModalModel { get; set; }
+ /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null.
+ public string DeviceCorrelationId { get; set; }
+ /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.
+ public int? AuthTokenTimeToLiveInSeconds { get; set; }
+ /// Whether or not to return the verify image hash.
+ public bool? ReturnVerifyImageHash { get; set; }
+ /// Threshold for confidence of the face verification.
+ public float? VerifyConfidenceThreshold { get; set; }
}
}
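For orientation, here is a minimal sketch of how the flattened CreateLivenessWithVerifySessionContent above might be populated. The public constructor taking only the required LivenessOperationMode is an assumption for illustration; the property names and constraints come from the generated members above.

// Sketch only: assumes a public CreateLivenessWithVerifySessionContent(LivenessOperationMode) constructor.
using System;
using Azure.AI.Vision.Face;

var content = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive)
{
    // Default is false, so the service requires deviceCorrelationId in this body;
    // if set to true, DeviceCorrelationId must be left null instead.
    DeviceCorrelationIdSetInClient = false,
    DeviceCorrelationId = Guid.NewGuid().ToString(),
    AuthTokenTimeToLiveInSeconds = 600, // valid range is 60 to 86400 seconds
    EnableSessionImage = true,
    ReturnVerifyImageHash = false,
    VerifyConfidenceThreshold = 0.7f,
};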
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs
new file mode 100644
index 000000000000..cccaed59a390
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs
@@ -0,0 +1,176 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.IO;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class CreateLivenessWithVerifySessionMultipartContent : IUtf8JsonSerializable, IJsonModel<CreateLivenessWithVerifySessionMultipartContent>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<CreateLivenessWithVerifySessionMultipartContent>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<CreateLivenessWithVerifySessionMultipartContent>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateLivenessWithVerifySessionMultipartContent>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("Parameters"u8);
+ writer.WriteObjectValue(Parameters, options);
+ writer.WritePropertyName("VerifyImage"u8);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(global::System.BinaryData.FromStream(VerifyImage));
+#else
+ using (JsonDocument document = JsonDocument.Parse(BinaryData.FromStream(VerifyImage)))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ CreateLivenessWithVerifySessionMultipartContent IJsonModel<CreateLivenessWithVerifySessionMultipartContent>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateLivenessWithVerifySessionMultipartContent>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement, options);
+ }
+
+ internal static CreateLivenessWithVerifySessionMultipartContent DeserializeCreateLivenessWithVerifySessionMultipartContent(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ CreateLivenessWithVerifySessionContent parameters = default;
+ Stream verifyImage = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("Parameters"u8))
+ {
+ parameters = CreateLivenessWithVerifySessionContent.DeserializeCreateLivenessWithVerifySessionContent(property.Value, options);
+ continue;
+ }
+ if (property.NameEquals("VerifyImage"u8))
+ {
+ verifyImage = BinaryData.FromString(property.Value.GetRawText()).ToStream();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new CreateLivenessWithVerifySessionMultipartContent(parameters, verifyImage, serializedAdditionalRawData);
+ }
+
+ private BinaryData SerializeMultipart(ModelReaderWriterOptions options)
+ {
+ using MultipartFormDataRequestContent content = ToMultipartRequestContent();
+ using MemoryStream stream = new MemoryStream();
+ content.WriteTo(stream);
+ if (stream.Position > int.MaxValue)
+ {
+ return BinaryData.FromStream(stream);
+ }
+ else
+ {
+ return new BinaryData(stream.GetBuffer().AsMemory(0, (int)stream.Position));
+ }
+ }
+
+ internal virtual MultipartFormDataRequestContent ToMultipartRequestContent()
+ {
+ MultipartFormDataRequestContent content = new MultipartFormDataRequestContent();
+ content.Add(ModelReaderWriter.Write(Parameters, ModelSerializationExtensions.WireOptions), "Parameters");
+ content.Add(VerifyImage, "VerifyImage", "VerifyImage", "application/octet-stream");
+ return content;
+ }
+
+ BinaryData IPersistableModel<CreateLivenessWithVerifySessionMultipartContent>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateLivenessWithVerifySessionMultipartContent>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ case "MFD":
+ return SerializeMultipart(options);
+ default:
+ throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ CreateLivenessWithVerifySessionMultipartContent IPersistableModel<CreateLivenessWithVerifySessionMultipartContent>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateLivenessWithVerifySessionMultipartContent>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<CreateLivenessWithVerifySessionMultipartContent>.GetFormatFromOptions(ModelReaderWriterOptions options) => "MFD";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static CreateLivenessWithVerifySessionMultipartContent FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs
new file mode 100644
index 000000000000..972c14733528
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Azure.AI.Vision.Face
+{
+ /// Request of liveness with verify session creation.
+ internal partial class CreateLivenessWithVerifySessionMultipartContent
+ {
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of CreateLivenessWithVerifySessionMultipartContent.
+ /// The parameters for creating session.
+ /// The image stream for verify. Content-Disposition header field for this part must have filename.
+ /// Thrown when parameters or verifyImage is null.
+ public CreateLivenessWithVerifySessionMultipartContent(CreateLivenessWithVerifySessionContent parameters, Stream verifyImage)
+ {
+ Argument.AssertNotNull(parameters, nameof(parameters));
+ Argument.AssertNotNull(verifyImage, nameof(verifyImage));
+
+ Parameters = parameters;
+ VerifyImage = verifyImage;
+ }
+
+ /// Initializes a new instance of CreateLivenessWithVerifySessionMultipartContent.
+ /// The parameters for creating session.
+ /// The image stream for verify. Content-Disposition header field for this part must have filename.
+ /// Keeps track of any properties unknown to the library.
+ internal CreateLivenessWithVerifySessionMultipartContent(CreateLivenessWithVerifySessionContent parameters, Stream verifyImage, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Parameters = parameters;
+ VerifyImage = verifyImage;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of CreateLivenessWithVerifySessionMultipartContent for deserialization.
+ internal CreateLivenessWithVerifySessionMultipartContent()
+ {
+ }
+
+ /// The parameters for creating session.
+ public CreateLivenessWithVerifySessionContent Parameters { get; }
+ /// The image stream for verify. Content-Disposition header field for this part must have filename.
+ public Stream VerifyImage { get; }
+ }
+}
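Taken together with the serialization file above, a short sketch of how this internal multipart type flows: the public constructor pairs the JSON parameters with the raw verify image, and the persistable-model write dispatches on the "MFD" format token to produce a multipart/form-data body. Application code would not call this directly; the constructor shown is the one in this diff, while the CreateLivenessWithVerifySessionContent constructor is an assumption.

// Sketch only; CreateLivenessWithVerifySessionMultipartContent is internal to the library.
using System;
using System.ClientModel.Primitives;
using System.IO;

var parameters = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive); // assumed public ctor
using Stream verifyImage = File.OpenRead("verify.jpg");
var multipart = new CreateLivenessWithVerifySessionMultipartContent(parameters, verifyImage);

// GetFormatFromOptions returns "MFD", so a wire-format write routes through
// SerializeMultipart/ToMultipartRequestContent rather than the JSON path.
BinaryData body = ModelReaderWriter.Write(multipart, new ModelReaderWriterOptions("MFD"));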
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs
new file mode 100644
index 000000000000..682c4f08d863
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs
@@ -0,0 +1,146 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class CreatePersonRequest : IUtf8JsonSerializable, IJsonModel<CreatePersonRequest>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<CreatePersonRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<CreatePersonRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("name"u8);
+ writer.WriteStringValue(Name);
+ if (Optional.IsDefined(UserData))
+ {
+ writer.WritePropertyName("userData"u8);
+ writer.WriteStringValue(UserData);
+ }
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ CreatePersonRequest IJsonModel<CreatePersonRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeCreatePersonRequest(document.RootElement, options);
+ }
+
+ internal static CreatePersonRequest DeserializeCreatePersonRequest(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ string name = default;
+ string userData = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("name"u8))
+ {
+ name = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("userData"u8))
+ {
+ userData = property.Value.GetString();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new CreatePersonRequest(name, userData, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<CreatePersonRequest>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ CreatePersonRequest IPersistableModel<CreatePersonRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeCreatePersonRequest(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<CreatePersonRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static CreatePersonRequest FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeCreatePersonRequest(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs
new file mode 100644
index 000000000000..5f2cefbcce5b
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs
@@ -0,0 +1,79 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// The CreatePersonRequest.
+ internal partial class CreatePersonRequest
+ {
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of CreatePersonRequest.
+ /// User defined name, maximum length is 128.
+ /// Thrown when name is null.
+ internal CreatePersonRequest(string name)
+ {
+ Argument.AssertNotNull(name, nameof(name));
+
+ Name = name;
+ }
+
+ /// Initializes a new instance of CreatePersonRequest.
+ /// User defined name, maximum length is 128.
+ /// Optional user defined data. Length should not exceed 16K.
+ /// Keeps track of any properties unknown to the library.
+ internal CreatePersonRequest(string name, string userData, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Name = name;
+ UserData = userData;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of CreatePersonRequest for deserialization.
+ internal CreatePersonRequest()
+ {
+ }
+
+ /// User defined name, maximum length is 128.
+ public string Name { get; }
+ /// Optional user defined data. Length should not exceed 16K.
+ public string UserData { get; }
+ }
+}
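Since CreatePersonRequest is internal, callers reach this shape through the protocol methods, and the serializer above writes a flat JSON object. A hedged sketch of an equivalent payload follows; the client method that would accept it is not shown in this diff.

// Matches the wire shape produced by CreatePersonRequest: required "name", optional "userData".
using Azure.Core;

using RequestContent content = RequestContent.Create(new
{
    name = "your_person_name",      // maximum length is 128
    userData = "optional metadata", // length should not exceed 16K
});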
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs
new file mode 100644
index 000000000000..e0afa23647b8
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs
@@ -0,0 +1,135 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ public partial class CreatePersonResult : IUtf8JsonSerializable, IJsonModel<CreatePersonResult>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<CreatePersonResult>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<CreatePersonResult>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("personId"u8);
+ writer.WriteStringValue(PersonId);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ CreatePersonResult IJsonModel<CreatePersonResult>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeCreatePersonResult(document.RootElement, options);
+ }
+
+ internal static CreatePersonResult DeserializeCreatePersonResult(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ Guid personId = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("personId"u8))
+ {
+ personId = property.Value.GetGuid();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new CreatePersonResult(personId, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<CreatePersonResult>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ CreatePersonResult IPersistableModel<CreatePersonResult>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreatePersonResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeCreatePersonResult(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<CreatePersonResult>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static CreatePersonResult FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeCreatePersonResult(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs
new file mode 100644
index 000000000000..d2b993228dd6
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// Response of create person.
+ public partial class CreatePersonResult
+ {
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of CreatePersonResult.
+ /// Person ID of the person.
+ internal CreatePersonResult(Guid personId)
+ {
+ PersonId = personId;
+ }
+
+ /// Initializes a new instance of CreatePersonResult.
+ /// Person ID of the person.
+ /// Keeps track of any properties unknown to the library.
+ internal CreatePersonResult(Guid personId, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ PersonId = personId;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of CreatePersonResult for deserialization.
+ internal CreatePersonResult()
+ {
+ }
+
+ /// Person ID of the person.
+ public Guid PersonId { get; }
+ }
+}
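The corresponding response body is a single-property JSON object, so parsing it from a protocol call mirrors DeserializeCreatePersonResult above:

using System;
using System.Text.Json;

// 'response' is assumed to come from the matching protocol method on the client.
using JsonDocument document = JsonDocument.Parse(response.ContentStream);
Guid personId = document.RootElement.GetProperty("personId").GetGuid();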
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs
new file mode 100644
index 000000000000..5dddb239ac5c
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs
@@ -0,0 +1,161 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class CreateRequest : IUtf8JsonSerializable, IJsonModel<CreateRequest>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<CreateRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<CreateRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreateRequest)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("name"u8);
+ writer.WriteStringValue(Name);
+ if (Optional.IsDefined(UserData))
+ {
+ writer.WritePropertyName("userData"u8);
+ writer.WriteStringValue(UserData);
+ }
+ if (Optional.IsDefined(RecognitionModel))
+ {
+ writer.WritePropertyName("recognitionModel"u8);
+ writer.WriteStringValue(RecognitionModel.Value.ToString());
+ }
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ CreateRequest IJsonModel<CreateRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreateRequest)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeCreateRequest(document.RootElement, options);
+ }
+
+ internal static CreateRequest DeserializeCreateRequest(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ string name = default;
+ string userData = default;
+ FaceRecognitionModel? recognitionModel = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("name"u8))
+ {
+ name = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("userData"u8))
+ {
+ userData = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("recognitionModel"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ recognitionModel = new FaceRecognitionModel(property.Value.GetString());
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new CreateRequest(name, userData, recognitionModel, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<CreateRequest>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(CreateRequest)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ CreateRequest IPersistableModel<CreateRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeCreateRequest(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(CreateRequest)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<CreateRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static CreateRequest FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeCreateRequest(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs
new file mode 100644
index 000000000000..28b037d301c9
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// The CreateRequest.
+ internal partial class CreateRequest
+ {
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of CreateRequest.
+ /// User defined name, maximum length is 128.
+ /// Thrown when name is null.
+ internal CreateRequest(string name)
+ {
+ Argument.AssertNotNull(name, nameof(name));
+
+ Name = name;
+ }
+
+ /// Initializes a new instance of CreateRequest.
+ /// User defined name, maximum length is 128.
+ /// Optional user defined data. Length should not exceed 16K.
+ /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+ /// Keeps track of any properties unknown to the library.
+ internal CreateRequest(string name, string userData, FaceRecognitionModel? recognitionModel, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Name = name;
+ UserData = userData;
+ RecognitionModel = recognitionModel;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of CreateRequest for deserialization.
+ internal CreateRequest()
+ {
+ }
+
+ /// User defined name, maximum length is 128.
+ public string Name { get; }
+ /// Optional user defined data. Length should not exceed 16K.
+ public string UserData { get; }
+ /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+ public FaceRecognitionModel? RecognitionModel { get; }
+ }
+}
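As with CreatePersonRequest, this internal model maps to a flat JSON body. A sketch of the equivalent protocol payload, with recognitionModel spelled as the service expects:

using Azure.Core;

using RequestContent content = RequestContent.Create(new
{
    name = "your_large_face_list_name",
    userData = "optional metadata",
    recognitionModel = "recognition_04", // omit to get the service default, 'recognition_01'
});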
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs
new file mode 100644
index 000000000000..2d7461262254
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs
@@ -0,0 +1,161 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class CreateRequest1 : IUtf8JsonSerializable, IJsonModel<CreateRequest1>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<CreateRequest1>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<CreateRequest1>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest1>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreateRequest1)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("name"u8);
+ writer.WriteStringValue(Name);
+ if (Optional.IsDefined(UserData))
+ {
+ writer.WritePropertyName("userData"u8);
+ writer.WriteStringValue(UserData);
+ }
+ if (Optional.IsDefined(RecognitionModel))
+ {
+ writer.WritePropertyName("recognitionModel"u8);
+ writer.WriteStringValue(RecognitionModel.Value.ToString());
+ }
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ CreateRequest1 IJsonModel<CreateRequest1>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest1>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(CreateRequest1)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeCreateRequest1(document.RootElement, options);
+ }
+
+ internal static CreateRequest1 DeserializeCreateRequest1(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ string name = default;
+ string userData = default;
+ FaceRecognitionModel? recognitionModel = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("name"u8))
+ {
+ name = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("userData"u8))
+ {
+ userData = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("recognitionModel"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ recognitionModel = new FaceRecognitionModel(property.Value.GetString());
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new CreateRequest1(name, userData, recognitionModel, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<CreateRequest1>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest1>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(CreateRequest1)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ CreateRequest1 IPersistableModel<CreateRequest1>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<CreateRequest1>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeCreateRequest1(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(CreateRequest1)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<CreateRequest1>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static CreateRequest1 FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeCreateRequest1(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs
new file mode 100644
index 000000000000..6c354abd8419
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// The CreateRequest1.
+ internal partial class CreateRequest1
+ {
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of CreateRequest1.
+ /// User defined name, maximum length is 128.
+ /// Thrown when name is null.
+ internal CreateRequest1(string name)
+ {
+ Argument.AssertNotNull(name, nameof(name));
+
+ Name = name;
+ }
+
+ /// Initializes a new instance of CreateRequest1.
+ /// User defined name, maximum length is 128.
+ /// Optional user defined data. Length should not exceed 16K.
+ /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+ /// Keeps track of any properties unknown to the library.
+ internal CreateRequest1(string name, string userData, FaceRecognitionModel? recognitionModel, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Name = name;
+ UserData = userData;
+ RecognitionModel = recognitionModel;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of CreateRequest1 for deserialization.
+ internal CreateRequest1()
+ {
+ }
+
+ /// User defined name, maximum length is 128.
+ public string Name { get; }
+ /// Optional user defined data. Length should not exceed 16K.
+ public string UserData { get; }
+ /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+ public FaceRecognitionModel? RecognitionModel { get; }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs
new file mode 100644
index 000000000000..df96544d127a
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs
@@ -0,0 +1,135 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class DetectFromSessionImageRequest : IUtf8JsonSerializable, IJsonModel<DetectFromSessionImageRequest>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<DetectFromSessionImageRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<DetectFromSessionImageRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<DetectFromSessionImageRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("sessionImageId"u8);
+ writer.WriteStringValue(SessionImageId);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ DetectFromSessionImageRequest IJsonModel<DetectFromSessionImageRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<DetectFromSessionImageRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeDetectFromSessionImageRequest(document.RootElement, options);
+ }
+
+ internal static DetectFromSessionImageRequest DeserializeDetectFromSessionImageRequest(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ string sessionImageId = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("sessionImageId"u8))
+ {
+ sessionImageId = property.Value.GetString();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new DetectFromSessionImageRequest(sessionImageId, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<DetectFromSessionImageRequest>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<DetectFromSessionImageRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ DetectFromSessionImageRequest IPersistableModel<DetectFromSessionImageRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<DetectFromSessionImageRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeDetectFromSessionImageRequest(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<DetectFromSessionImageRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static DetectFromSessionImageRequest FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeDetectFromSessionImageRequest(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs
new file mode 100644
index 000000000000..637773ec68ff
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// The DetectFromSessionImageRequest.
+ internal partial class DetectFromSessionImageRequest
+ {
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of DetectFromSessionImageRequest.
+ /// Id of session image.
+ /// Thrown when sessionImageId is null.
+ internal DetectFromSessionImageRequest(string sessionImageId)
+ {
+ Argument.AssertNotNull(sessionImageId, nameof(sessionImageId));
+
+ SessionImageId = sessionImageId;
+ }
+
+ /// Initializes a new instance of DetectFromSessionImageRequest.
+ /// Id of session image.
+ /// Keeps track of any properties unknown to the library.
+ internal DetectFromSessionImageRequest(string sessionImageId, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ SessionImageId = sessionImageId;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of DetectFromSessionImageRequest for deserialization.
+ internal DetectFromSessionImageRequest()
+ {
+ }
+
+ /// Id of session image.
+ public string SessionImageId { get; }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml
index 14e05d4067a3..7568dc79da64 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml
@@ -213,6 +213,208 @@ Response response = client.Group(content);
JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
Console.WriteLine(result.GetProperty("groups")[0][0].ToString());
Console.WriteLine(result.GetProperty("messyGroup")[0].ToString());
+]]>
+
+
+
+This sample shows how to call FindSimilarFromLargeFaceListAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id");
+]]>
+
+
+
+This sample shows how to call FindSimilarFromLargeFaceList.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id");
+]]>
+
+
+
+This sample shows how to call FindSimilarFromLargeFaceListAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ faceId = "c5c24a82-6845-4031-9d5d-978df9175426",
+ maxNumOfCandidatesReturned = 3,
+ mode = "matchPerson",
+ largeFaceListId = "your_large_face_list_id",
+});
+Response response = await client.FindSimilarFromLargeFaceListAsync(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("confidence").ToString());
+]]>
+
+
+
+This sample shows how to call FindSimilarFromLargeFaceList and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ faceId = "c5c24a82-6845-4031-9d5d-978df9175426",
+ maxNumOfCandidatesReturned = 3,
+ mode = "matchPerson",
+ largeFaceListId = "your_large_face_list_id",
+});
+Response response = client.FindSimilarFromLargeFaceList(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("confidence").ToString());
+]]>
+
+
+
+This sample shows how to call IdentifyFromLargePersonGroupAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id");
+]]>
+
+
+
+This sample shows how to call IdentifyFromLargePersonGroup.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id");
+]]>
+
+
+
+This sample shows how to call IdentifyFromLargePersonGroupAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ faceIds = new object[]
+ {
+ "c5c24a82-6845-4031-9d5d-978df9175426"
+ },
+ largePersonGroupId = "your_large_person_group_id",
+ maxNumOfCandidatesReturned = 9,
+ confidenceThreshold = 0.7,
+});
+Response response = await client.IdentifyFromLargePersonGroupAsync(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("faceId").ToString());
+Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString());
+Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString());
+]]>
+
+
+
+This sample shows how to call IdentifyFromLargePersonGroup and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ faceIds = new object[]
+ {
+ "c5c24a82-6845-4031-9d5d-978df9175426"
+ },
+ largePersonGroupId = "your_large_person_group_id",
+ maxNumOfCandidatesReturned = 9,
+ confidenceThreshold = 0.7,
+});
+Response response = client.IdentifyFromLargePersonGroup(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("faceId").ToString());
+Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString());
+Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString());
+]]>
+
+
+
+This sample shows how to call VerifyFromLargePersonGroupAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c"));
+]]>
+
+
+
+This sample shows how to call VerifyFromLargePersonGroup.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+Response response = client.VerifyFromLargePersonGroup(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c"));
+]]>
+
+
+
+This sample shows how to call VerifyFromLargePersonGroupAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ faceId = "c5c24a82-6845-4031-9d5d-978df9175426",
+ personId = "815df99c-598f-4926-930a-a734b3fd651c",
+ largePersonGroupId = "your_large_person_group",
+});
+Response response = await client.VerifyFromLargePersonGroupAsync(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("isIdentical").ToString());
+Console.WriteLine(result.GetProperty("confidence").ToString());
+]]>
+
+
+
+This sample shows how to call VerifyFromLargePersonGroup and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceClient client = new FaceClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ faceId = "c5c24a82-6845-4031-9d5d-978df9175426",
+ personId = "815df99c-598f-4926-930a-a734b3fd651c",
+ largePersonGroupId = "your_large_person_group",
+});
+Response response = client.VerifyFromLargePersonGroup(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("isIdentical").ToString());
+Console.WriteLine(result.GetProperty("confidence").ToString());
]]>
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml
index 3ab65974a572..3754a0217479 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml
@@ -497,6 +497,120 @@ Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString
Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString());
Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString());
Console.WriteLine(result[0].GetProperty("digest").ToString());
+]]>
+
+
+
+This sample shows how to call DetectFromSessionImageAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+Response> response = await client.DetectFromSessionImageAsync("aa93ce80-9a9b-48bd-ae1a-1c7543841e92");
+]]>
+
+
+
+This sample shows how to call DetectFromSessionImage.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+Response> response = client.DetectFromSessionImage("aa93ce80-9a9b-48bd-ae1a-1c7543841e92");
+]]>
+
+
+
+This sample shows how to call DetectFromSessionImageAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92",
+});
+Response response = await client.DetectFromSessionImageAsync(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString());
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString());
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString());
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString());
+]]>
+
+
+
+This sample shows how to call DetectFromSessionImage and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+using RequestContent content = RequestContent.Create(new
+{
+ sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92",
+});
+Response response = client.DetectFromSessionImage(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString());
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString());
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString());
+Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString());
+]]>
+
+
+
+This sample shows how to call GetSessionImageAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa");
+]]>
+
+
+
+This sample shows how to call GetSessionImage.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa");
+]]>
+
+
+
+This sample shows how to call GetSessionImageAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa", null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.ToString());
+]]>
+
+
+
+This sample shows how to call GetSessionImage and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+FaceSessionClient client = new FaceSessionClient(endpoint, credential);
+
+Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa", null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.ToString());
]]>
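
For the session-image detection samples above, a short sketch of consuming the typed result instead of walking the JSON; it assumes FaceDetectionResult carries a FaceRectangle with Top/Left/Width/Height, matching the "faceRectangle" fields printed in the protocol samples:

using System;
using System.Threading.Tasks;
using Azure;
using Azure.AI.Vision.Face;

class SessionDetectSketch
{
    static async Task Main()
    {
        Uri endpoint = new Uri("<https://my-service.azure.com>");
        var client = new FaceSessionClient(endpoint, new AzureKeyCredential("<key>"));

        var response = await client.DetectFromSessionImageAsync("aa93ce80-9a9b-48bd-ae1a-1c7543841e92");
        foreach (FaceDetectionResult face in response.Value)
        {
            // Equivalent to the four GetProperty("faceRectangle") lines in the protocol sample.
            Console.WriteLine($"{face.FaceRectangle.Top},{face.FaceRectangle.Left},{face.FaceRectangle.Width},{face.FaceRectangle.Height}");
        }
    }
}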
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClientImpl.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClientImpl.xml
new file mode 100644
index 000000000000..a1da24478c39
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClientImpl.xml
@@ -0,0 +1,469 @@
+
+
+
+
+
+This sample shows how to call CreateAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.CreateAsync("your_large_face_list_name");
+]]>
+
+
+
+This sample shows how to call Create.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.Create("your_large_face_list_name");
+]]>
+
+
+
+This sample shows how to call CreateAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_face_list_name",
+ userData = "your_user_data",
+ recognitionModel = "recognition_01",
+});
+Response response = await client.CreateAsync(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call Create.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_face_list_name",
+ userData = "your_user_data",
+ recognitionModel = "recognition_01",
+});
+Response response = client.Create(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call DeleteAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.DeleteAsync();
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call Delete.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.Delete();
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetLargeFaceListAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.GetLargeFaceListAsync();
+]]>
+
+
+
+This sample shows how to call GetLargeFaceList.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.GetLargeFaceList();
+]]>
+
+
+
+This sample shows how to call GetLargeFaceListAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.GetLargeFaceListAsync(true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("name").ToString());
+Console.WriteLine(result.GetProperty("largeFaceListId").ToString());
+]]>
+
+
+
+This sample shows how to call GetLargeFaceList and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.GetLargeFaceList(true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("name").ToString());
+Console.WriteLine(result.GetProperty("largeFaceListId").ToString());
+]]>
+
+
+
+This sample shows how to call UpdateAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_face_list_name",
+ userData = "your_user_data",
+});
+Response response = await client.UpdateAsync(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call Update.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_face_list_name",
+ userData = "your_user_data",
+});
+Response response = client.Update(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetLargeFaceListsAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null);
+
+Response> response = await client.GetLargeFaceListsAsync();
+]]>
+
+
+
+This sample shows how to call GetLargeFaceLists.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null);
+
+Response> response = client.GetLargeFaceLists();
+]]>
+
+
+
+This sample shows how to call GetLargeFaceListsAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null);
+
+Response response = await client.GetLargeFaceListsAsync("my_list_id", 20, true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("name").ToString());
+Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString());
+]]>
+
+
+
+This sample shows how to call GetLargeFaceLists and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient(null);
+
+Response response = client.GetLargeFaceLists("my_list_id", 20, true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("name").ToString());
+Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString());
+]]>
+
+
+
+This sample shows how to call GetTrainingStatusAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.GetTrainingStatusAsync();
+]]>
+
+
+
+This sample shows how to call GetTrainingStatus.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.GetTrainingStatus();
+]]>
+
+
+
+This sample shows how to call GetTrainingStatusAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.GetTrainingStatusAsync(null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("status").ToString());
+Console.WriteLine(result.GetProperty("createdDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastActionDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString());
+]]>
+
+
+
+This sample shows how to call GetTrainingStatus and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.GetTrainingStatus(null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("status").ToString());
+Console.WriteLine(result.GetProperty("createdDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastActionDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString());
+]]>
+
+
+
+This sample shows how to call DeleteFaceAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.DeleteFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call DeleteFace.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.DeleteFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetFaceAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+]]>
+
+
+
+This sample shows how to call GetFace.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+]]>
+
+
+
+This sample shows how to call GetFaceAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("persistedFaceId").ToString());
+]]>
+
+
+
+This sample shows how to call GetFace and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("persistedFaceId").ToString());
+]]>
+
+
+
+This sample shows how to call UpdateFaceAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ userData = "your_user_data",
+});
+Response response = await client.UpdateFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call UpdateFace.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ userData = "your_user_data",
+});
+Response response = client.UpdateFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetFacesAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response> response = await client.GetFacesAsync();
+]]>
+
+
+
+This sample shows how to call GetFaces.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response> response = client.GetFaces();
+]]>
+
+
+
+This sample shows how to call GetFacesAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = await client.GetFacesAsync("00000000-0000-0000-0000-000000000000", 20, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString());
+]]>
+
+
+
+This sample shows how to call GetFaces and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Response response = client.GetFaces("00000000-0000-0000-0000-000000000000", 20, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString());
+]]>
+
+
+
+This sample shows how to call TrainAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Operation operation = await client.TrainAsync(WaitUntil.Completed);
+]]>
+
+
+
+This sample shows how to call Train.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargeFaceListClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClientImplClient("your_large_face_list_id");
+
+Operation operation = client.Train(WaitUntil.Completed);
+]]>
+
+
+
\ No newline at end of file
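
Taken together, the samples in this new doc file cover the whole list lifecycle. A compact end-to-end sketch, restricted to the methods documented above (Create, Train, GetTrainingStatus, Delete) and assuming FaceTrainingResult exposes a Status property matching the "status" JSON field:

using System;
using System.Threading.Tasks;
using Azure;
using Azure.AI.Vision.Face;

class LargeFaceListSketch
{
    static async Task Main()
    {
        Uri endpoint = new Uri("<https://my-service.azure.com>");
        var admin = new FaceAdministrationClient(endpoint, new AzureKeyCredential("<key>"));
        LargeFaceListClientImpl list = admin.GetLargeFaceListClientImplClient("your_large_face_list_id");

        await list.CreateAsync("your_large_face_list_name");
        // Faces would be added to the list here before training.
        await list.TrainAsync(WaitUntil.Completed);          // blocks until the training LRO finishes

        Response<FaceTrainingResult> status = await list.GetTrainingStatusAsync();
        Console.WriteLine(status.Value.Status);              // assumed property, maps to "status"

        await list.DeleteAsync();
    }
}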
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClientImpl.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClientImpl.xml
new file mode 100644
index 000000000000..5d708b357ab4
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClientImpl.xml
@@ -0,0 +1,645 @@
+
+
+
+
+
+This sample shows how to call CreateAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.CreateAsync("your_large_person_group_name");
+]]>
+
+
+
+This sample shows how to call Create.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.Create("your_large_person_group_name");
+]]>
+
+
+
+This sample shows how to call CreateAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_name",
+ userData = "your_user_data",
+ recognitionModel = "recognition_01",
+});
+Response response = await client.CreateAsync(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call Create.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_name",
+ userData = "your_user_data",
+ recognitionModel = "recognition_01",
+});
+Response response = client.Create(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call DeleteAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.DeleteAsync();
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call Delete.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.Delete();
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroupAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetLargePersonGroupAsync();
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroup.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetLargePersonGroup();
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroupAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetLargePersonGroupAsync(true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("name").ToString());
+Console.WriteLine(result.GetProperty("largePersonGroupId").ToString());
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroup and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetLargePersonGroup(true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("name").ToString());
+Console.WriteLine(result.GetProperty("largePersonGroupId").ToString());
+]]>
+
+
+
+This sample shows how to call UpdateAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_name",
+ userData = "your_user_data",
+});
+Response response = await client.UpdateAsync(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call Update.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_name",
+ userData = "your_user_data",
+});
+Response response = client.Update(content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroupsAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+Response> response = await client.GetLargePersonGroupsAsync();
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroups.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+Response> response = client.GetLargePersonGroups();
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroupsAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+Response response = await client.GetLargePersonGroupsAsync("00000000-0000-0000-0000-000000000000", 20, true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("name").ToString());
+Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString());
+]]>
+
+
+
+This sample shows how to call GetLargePersonGroups and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient(null);
+
+Response response = client.GetLargePersonGroups("00000000-0000-0000-0000-000000000000", 20, true, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("name").ToString());
+Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString());
+]]>
+
+
+
+This sample shows how to call GetTrainingStatusAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetTrainingStatusAsync();
+]]>
+
+
+
+This sample shows how to call GetTrainingStatus.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetTrainingStatus();
+]]>
+
+
+
+This sample shows how to call GetTrainingStatusAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetTrainingStatusAsync(null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("status").ToString());
+Console.WriteLine(result.GetProperty("createdDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastActionDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString());
+]]>
+
+
+
+This sample shows how to call GetTrainingStatus and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetTrainingStatus(null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("status").ToString());
+Console.WriteLine(result.GetProperty("createdDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastActionDateTime").ToString());
+Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString());
+]]>
+
+
+
+This sample shows how to call CreatePersonAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.CreatePersonAsync("your_large_person_group_person_name");
+]]>
+
+
+
+This sample shows how to call CreatePerson.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.CreatePerson("your_large_person_group_person_name");
+]]>
+
+
+
+This sample shows how to call CreatePersonAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_person_name",
+ userData = "your_user_data",
+});
+Response response = await client.CreatePersonAsync(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("personId").ToString());
+]]>
+
+
+
+This sample shows how to call CreatePerson and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_person_name",
+ userData = "your_user_data",
+});
+Response response = client.CreatePerson(content);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("personId").ToString());
+]]>
+
+
+
+This sample shows how to call DeletePersonAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.DeletePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call DeletePerson.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.DeletePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetPersonAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+]]>
+
+
+
+This sample shows how to call GetPerson.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+]]>
+
+
+
+This sample shows how to call GetPersonAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("personId").ToString());
+Console.WriteLine(result.GetProperty("name").ToString());
+]]>
+
+
+
+This sample shows how to call GetPerson and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("personId").ToString());
+Console.WriteLine(result.GetProperty("name").ToString());
+]]>
+
+
+
+This sample shows how to call UpdatePersonAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_person_name",
+ userData = "your_user_data",
+});
+Response response = await client.UpdatePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call UpdatePerson.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ name = "your_large_person_group_person_name",
+ userData = "your_user_data",
+});
+Response response = client.UpdatePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetPersonsAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response> response = await client.GetPersonsAsync();
+]]>
+
+
+
+This sample shows how to call GetPersons.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response> response = client.GetPersons();
+]]>
+
+
+
+This sample shows how to call GetPersonsAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetPersonsAsync("00000000-0000-0000-0000-000000000000", 20, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("personId").ToString());
+Console.WriteLine(result[0].GetProperty("name").ToString());
+]]>
+
+
+
+This sample shows how to call GetPersons and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetPersons("00000000-0000-0000-0000-000000000000", 20, null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result[0].GetProperty("personId").ToString());
+Console.WriteLine(result[0].GetProperty("name").ToString());
+]]>
+
+
+
+This sample shows how to call DeleteFaceAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.DeleteFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call DeleteFace.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.DeleteFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call GetFaceAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+]]>
+
+
+
+This sample shows how to call GetFace.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+]]>
+
+
+
+This sample shows how to call GetFaceAsync and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("persistedFaceId").ToString());
+]]>
+
+
+
+This sample shows how to call GetFace and parse the result.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null);
+
+JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+Console.WriteLine(result.GetProperty("persistedFaceId").ToString());
+]]>
+
+
+
+This sample shows how to call UpdateFaceAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ userData = "your_user_data",
+});
+Response response = await client.UpdateFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call UpdateFace.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+using RequestContent content = RequestContent.Create(new
+{
+ userData = "your_user_data",
+});
+Response response = client.UpdateFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+Console.WriteLine(response.Status);
+]]>
+
+
+
+This sample shows how to call TrainAsync.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Operation operation = await client.TrainAsync(WaitUntil.Completed);
+]]>
+
+
+
+This sample shows how to call Train.
+");
+AzureKeyCredential credential = new AzureKeyCredential("");
+LargePersonGroupClientImpl client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClientImplClient("your_large_person_group_id");
+
+Operation operation = client.Train(WaitUntil.Completed);
+]]>
+
+
+
\ No newline at end of file
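
The person-group samples above follow the same lifecycle as the large face list. A compact end-to-end sketch, limited to the methods documented in this file (Create, CreatePerson, Train, Delete); the PersonId property on CreatePersonResult is an assumption matching the "personId" JSON field printed above:

using System;
using System.Threading.Tasks;
using Azure;
using Azure.AI.Vision.Face;

class LargePersonGroupSketch
{
    static async Task Main()
    {
        Uri endpoint = new Uri("<https://my-service.azure.com>");
        var admin = new FaceAdministrationClient(endpoint, new AzureKeyCredential("<key>"));
        LargePersonGroupClientImpl group = admin.GetLargePersonGroupClientImplClient("your_large_person_group_id");

        await group.CreateAsync("your_large_person_group_name");

        Response<CreatePersonResult> person = await group.CreatePersonAsync("your_large_person_group_person_name");
        Console.WriteLine(person.Value.PersonId); // assumed property, maps to "personId"

        // Faces would be added to the person here before training.
        await group.TrainAsync(WaitUntil.Completed);

        await group.DeleteAsync();
    }
}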
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs
new file mode 100644
index 000000000000..23c0a1e61880
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs
@@ -0,0 +1,123 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using Azure.Core;
+using Azure.Core.Pipeline;
+
+namespace Azure.AI.Vision.Face
+{
+ // Data plane generated client.
+ /// <summary> The FaceAdministration service client. </summary>
+ public partial class FaceAdministrationClient
+ {
+ private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key";
+ private readonly AzureKeyCredential _keyCredential;
+ private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" };
+ private readonly TokenCredential _tokenCredential;
+ private readonly HttpPipeline _pipeline;
+ private readonly Uri _endpoint;
+ private readonly string _apiVersion;
+
+ /// <summary> The ClientDiagnostics is used to provide tracing support for the client library. </summary>
+ internal ClientDiagnostics ClientDiagnostics { get; }
+
+ /// <summary> The HTTP pipeline for sending and receiving REST requests and responses. </summary>
+ public virtual HttpPipeline Pipeline => _pipeline;
+
+ /// <summary> Initializes a new instance of FaceAdministrationClient for mocking. </summary>
+ protected FaceAdministrationClient()
+ {
+ }
+
+ /// <summary> Initializes a new instance of FaceAdministrationClient. </summary>
+ /// <param name="endpoint">
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ /// </param>
+ /// <param name="credential"> A credential used to authenticate to an Azure Service. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="endpoint"/> or <paramref name="credential"/> is null. </exception>
+ public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions())
+ {
+ }
+
+ /// <summary> Initializes a new instance of FaceAdministrationClient. </summary>
+ /// <param name="endpoint">
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ /// </param>
+ /// <param name="credential"> A credential used to authenticate to an Azure Service. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="endpoint"/> or <paramref name="credential"/> is null. </exception>
+ public FaceAdministrationClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions())
+ {
+ }
+
+ /// <summary> Initializes a new instance of FaceAdministrationClient. </summary>
+ /// <param name="endpoint">
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ /// </param>
+ /// <param name="credential"> A credential used to authenticate to an Azure Service. </param>
+ /// <param name="options"> The options for configuring the client. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="endpoint"/> or <paramref name="credential"/> is null. </exception>
+ public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential, AzureAIVisionFaceClientOptions options)
+ {
+ Argument.AssertNotNull(endpoint, nameof(endpoint));
+ Argument.AssertNotNull(credential, nameof(credential));
+ options ??= new AzureAIVisionFaceClientOptions();
+
+ ClientDiagnostics = new ClientDiagnostics(options, true);
+ _keyCredential = credential;
+ _pipeline = HttpPipelineBuilder.Build(options, Array.Empty<HttpPipelinePolicy>(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier());
+ _endpoint = endpoint;
+ _apiVersion = options.Version;
+ }
+
+ /// <summary> Initializes a new instance of FaceAdministrationClient. </summary>
+ /// <param name="endpoint">
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ /// </param>
+ /// <param name="credential"> A credential used to authenticate to an Azure Service. </param>
+ /// <param name="options"> The options for configuring the client. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="endpoint"/> or <paramref name="credential"/> is null. </exception>
+ public FaceAdministrationClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceClientOptions options)
+ {
+ Argument.AssertNotNull(endpoint, nameof(endpoint));
+ Argument.AssertNotNull(credential, nameof(credential));
+ options ??= new AzureAIVisionFaceClientOptions();
+
+ ClientDiagnostics = new ClientDiagnostics(options, true);
+ _tokenCredential = credential;
+ _pipeline = HttpPipelineBuilder.Build(options, Array.Empty<HttpPipelinePolicy>(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier());
+ _endpoint = endpoint;
+ _apiVersion = options.Version;
+ }
+
+ /// <summary> Initializes a new instance of LargeFaceListClientImpl. </summary>
+ /// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
+ /// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
+ public virtual LargeFaceListClientImpl GetLargeFaceListClientImplClient(string largeFaceListId)
+ {
+ Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
+
+ return new LargeFaceListClientImpl(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, largeFaceListId, _apiVersion);
+ }
+
+ /// <summary> Initializes a new instance of LargePersonGroupClientImpl. </summary>
+ /// <param name="largePersonGroupId"> ID of the container. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="largePersonGroupId"/> is null. </exception>
+ /// <exception cref="ArgumentException"> <paramref name="largePersonGroupId"/> is an empty string, and was expected to be non-empty. </exception>
+ public virtual LargePersonGroupClientImpl GetLargePersonGroupClientImplClient(string largePersonGroupId)
+ {
+ Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId));
+
+ return new LargePersonGroupClientImpl(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, largePersonGroupId, _apiVersion);
+ }
+ }
+}
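
The new client supports both constructors shown above. A brief usage sketch; DefaultAzureCredential comes from Azure.Identity, which is an assumption here (any TokenCredential works, and the key-based path is the one used throughout the XML samples):

using System;
using Azure;
using Azure.AI.Vision.Face;
using Azure.Identity;

class AdminClientSketch
{
    static void Main()
    {
        Uri endpoint = new Uri("https://{resource-name}.cognitiveservices.azure.com");

        // Entra ID path: BearerTokenAuthenticationPolicy with the cognitiveservices scope.
        var adminAad = new FaceAdministrationClient(endpoint, new DefaultAzureCredential());

        // Key path: AzureKeyCredentialPolicy sets the Ocp-Apim-Subscription-Key header.
        var adminKey = new FaceAdministrationClient(endpoint, new AzureKeyCredential("<key>"));

        // Sub-clients are bound to a resource id at creation time.
        LargeFaceListClientImpl faceList = adminAad.GetLargeFaceListClientImplClient("your_large_face_list_id");
        LargePersonGroupClientImpl personGroup = adminKey.GetLargePersonGroupClientImplClient("your_large_person_group_id");
    }
}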
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs
index 1a0b973e9201..7cb8c3849ea7 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs
@@ -105,7 +105,7 @@ public FaceClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceCli
/// <summary> Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. </summary>
/// <param name="uri"> URL of input image. </param>
- /// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. </param>
+ /// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. </param>
/// <param name="returnFaceId"> Return faceIds of the detected faces or not. The default value is true. </param>
/// <param name="returnFaceAttributes"> Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. </param>
@@ -114,22 +114,7 @@ public FaceClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceCli
/// <param name="faceIdTimeToLive"> The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="uri"/> is null. </exception>
- /// <remarks>
- /// > [!IMPORTANT]
- /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/.
- ///
- /// *
- /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.
- /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.
- /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
- /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
- /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.
- /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).
- /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model
- /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model.
- /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model.
- /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model.
- /// </remarks>
+ /// <remarks> Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. </remarks>
internal virtual async Task<Response<IReadOnlyList<FaceDetectionResult>>> DetectFromUrlImplAsync(Uri uri, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable<FaceAttributeType> returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(uri, nameof(uri));
@@ -150,7 +135,7 @@ internal virtual async Task<Response<IReadOnlyList<FaceDetectionResult>>> Detect
/// <summary> Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. </summary>
/// <param name="uri"> URL of input image. </param>
- /// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. </param>
+ /// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. </param>
/// <param name="returnFaceId"> Return faceIds of the detected faces or not. The default value is true. </param>
/// <param name="returnFaceAttributes"> Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. </param>
@@ -159,22 +144,7 @@ internal virtual async Task<Response<IReadOnlyList<FaceDetectionResult>>> Detect
/// <param name="faceIdTimeToLive"> The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="uri"/> is null. </exception>
- /// <remarks>
- /// > [!IMPORTANT]
- /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/.
- ///
- /// *
- /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.
- /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.
- /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
- /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
- /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.
- /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).
- /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model
- /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model.
- /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model.
- /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model.
- /// </remarks>
+ /// <remarks> Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. </remarks>
internal virtual Response<IReadOnlyList<FaceDetectionResult>> DetectFromUrlImpl(Uri uri, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable<FaceAttributeType> returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(uri, nameof(uri));
@@ -209,7 +179,7 @@ internal virtual Response<IReadOnlyList<FaceDetectionResult>> DetectFromUrlImpl(
///
///
/// <param name="content"> The content to send as the body of the request. </param>
- /// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". </param>
+ /// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". </param>
/// <param name="returnFaceId"> Return faceIds of the detected faces or not. The default value is true. </param>
/// <param name="returnFaceAttributes"> Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. </param>
@@ -254,7 +224,7 @@ internal virtual async Task DetectFromUrlImplAsync(RequestContent cont
///
///
/// The content to send as the body of the request.
- /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03".
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03".
/// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04".
/// Return faceIds of the detected faces or not. The default value is true.
/// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
@@ -285,7 +255,7 @@ internal virtual Response DetectFromUrlImpl(RequestContent content, string detec
/// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.
/// The input image binary.
- /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
/// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
/// Return faceIds of the detected faces or not. The default value is true.
/// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
@@ -294,22 +264,7 @@ internal virtual Response DetectFromUrlImpl(RequestContent content, string detec
/// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).
/// The cancellation token to use.
/// is null.
- ///
- /// > [!IMPORTANT]
- /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/.
- ///
- /// *
- /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.
- /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.
- /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
- /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
- /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.
- /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).
- /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model
- /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model.
- /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model.
- /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details.
internal virtual async Task>> DetectImplAsync(BinaryData imageContent, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(imageContent, nameof(imageContent));
@@ -330,7 +285,7 @@ internal virtual async Task>> Detect
/// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.
/// The input image binary.
- /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
/// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
/// Return faceIds of the detected faces or not. The default value is true.
/// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
@@ -339,22 +294,7 @@ internal virtual async Task>> Detect
/// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).
/// The cancellation token to use.
/// is null.
- ///
- /// > [!IMPORTANT]
- /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/.
- ///
- /// *
- /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.
- /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.
- /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
- /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
- /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.
- /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).
- /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model
- /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model.
- /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model.
- /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details.
internal virtual Response> DetectImpl(BinaryData imageContent, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(imageContent, nameof(imageContent));
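The two recommendations above (detection_03 plus recognition_04) combine naturally in one call. A minimal usage sketch, assuming a public DetectAsync convenience overload on FaceClient (only the internal *Impl methods appear in this hunk) and placeholder endpoint/key values:

using System;
using System.Threading.Tasks;
using Azure;
using Azure.AI.Vision.Face;

static async Task DetectSampleAsync()
{
    // Placeholder resource values; not taken from this diff.
    var client = new FaceClient(new Uri("https://<resource>.cognitiveservices.azure.com"), new AzureKeyCredential("<key>"));
    var detected = await client.DetectAsync(
        new Uri("https://example.com/face.jpg"),
        FaceDetectionModel.Detection03,      // recommended: better on small (64x64) and rotated faces
        FaceRecognitionModel.Recognition04,  // recommended: best overall accuracy, including masked faces
        returnFaceId: true);
    foreach (var face in detected.Value)
    {
        Console.WriteLine($"faceId={face.FaceId}, rect={face.FaceRectangle.Width}x{face.FaceRectangle.Height}");
    }
}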
@@ -389,7 +329,7 @@ internal virtual Response> DetectImpl(BinaryD
///
///
/// The content to send as the body of the request.
- /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03".
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03".
/// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04".
/// Return faceIds of the detected faces or not. The default value is true.
/// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
@@ -434,7 +374,7 @@ internal virtual async Task DetectImplAsync(RequestContent content, st
///
///
/// The content to send as the body of the request.
- /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03".
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03".
/// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04".
/// Return faceIds of the detected faces or not. The default value is true.
/// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
@@ -470,13 +410,7 @@ internal virtual Response DetectImpl(RequestContent content, string detectionMod
/// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
/// The cancellation token to use.
/// is null.
- ///
- /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.
- ///
- /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces.
- ///
- /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar for more details.
///
public virtual async Task>> FindSimilarAsync(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default)
{
@@ -503,13 +437,7 @@ public virtual async Task>> FindSi
/// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
/// The cancellation token to use.
/// is null.
- ///
- /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.
- ///
- /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces.
- ///
- /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar for more details.
///
public virtual Response> FindSimilar(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default)
{
@@ -611,14 +539,7 @@ public virtual Response FindSimilar(RequestContent content, RequestContext conte
/// The faceId of one face, come from "Detect".
/// The faceId of another face, come from "Detect".
/// The cancellation token to use.
- ///
- /// > [!NOTE]
- /// >
- /// > *
- /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
- /// > * For the scenarios that are sensitive to accuracy please make your own judgment.
- /// > * The 'recognitionModel' associated with the both faces should be the same.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-face-to-face for more details.
///
public virtual async Task> VerifyFaceToFaceAsync(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default)
{
@@ -632,14 +553,7 @@ public virtual async Task> VerifyFaceToFaceAsyn
/// The faceId of one face, come from "Detect".
/// The faceId of another face, come from "Detect".
/// The cancellation token to use.
- ///
- /// > [!NOTE]
- /// >
- /// > *
- /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
- /// > * For the scenarios that are sensitive to accuracy please make your own judgment.
- /// > * The 'recognitionModel' associated with the both faces should be the same.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-face-to-face for more details.
///
public virtual Response VerifyFaceToFace(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default)
{
@@ -731,14 +645,7 @@ public virtual Response VerifyFaceToFace(RequestContent content, RequestContext
/// Array of candidate faceIds created by "Detect". The maximum is 1000 faces.
/// The cancellation token to use.
/// is null.
- ///
- /// >
- /// *
- /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result.
- /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts.
- /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces.
- /// * The 'recognitionModel' associated with the query faces' faceIds should be the same.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/group for more details.
///
public virtual async Task> GroupAsync(IEnumerable faceIds, CancellationToken cancellationToken = default)
{
@@ -754,14 +661,7 @@ public virtual async Task> GroupAsync(IEnumerable Array of candidate faceIds created by "Detect". The maximum is 1000 faces.
/// The cancellation token to use.
/// is null.
- ///
- /// >
- /// *
- /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result.
- /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts.
- /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces.
- /// * The 'recognitionModel' associated with the query faces' faceIds should be the same.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/group for more details.
///
public virtual Response Group(IEnumerable faceIds, CancellationToken cancellationToken = default)
{
@@ -851,6 +751,386 @@ public virtual Response Group(RequestContent content, RequestContext context = n
}
}
+ /// Given a query face's faceId, search for similar-looking faces in a Large Face List. A 'largeFaceListId' is created by "Create Large Face List".
+ /// faceId of the query face. The user needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.
+ /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". A Large Face List contains a set of persistedFaceIds which are persisted and will never expire.
+ /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.
+ /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
+ /// The cancellation token to use.
+ /// is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar-from-large-face-list for more details.
+ ///
+ public virtual async Task<Response<IReadOnlyList<FaceFindSimilarResult>>> FindSimilarFromLargeFaceListAsync(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId));
+
+ FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = await FindSimilarFromLargeFaceListAsync(findSimilarFromLargeFaceListRequest.ToRequestContent(), context).ConfigureAwait(false);
+ IReadOnlyList<FaceFindSimilarResult> value = default;
+ using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
+ List<FaceFindSimilarResult> array = new List<FaceFindSimilarResult>();
+ foreach (var item in document.RootElement.EnumerateArray())
+ {
+ array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item));
+ }
+ value = array;
+ return Response.FromValue(value, response);
+ }
+
+ /// Given a query face's faceId, search for similar-looking faces in a Large Face List. A 'largeFaceListId' is created by "Create Large Face List".
+ /// faceId of the query face. The user needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.
+ /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". A Large Face List contains a set of persistedFaceIds which are persisted and will never expire.
+ /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.
+ /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
+ /// The cancellation token to use.
+ /// is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar-from-large-face-list for more details.
+ ///
+ public virtual Response<IReadOnlyList<FaceFindSimilarResult>> FindSimilarFromLargeFaceList(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId));
+
+ FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = FindSimilarFromLargeFaceList(findSimilarFromLargeFaceListRequest.ToRequestContent(), context);
+ IReadOnlyList<FaceFindSimilarResult> value = default;
+ using var document = JsonDocument.Parse(response.ContentStream);
+ List<FaceFindSimilarResult> array = new List<FaceFindSimilarResult>();
+ foreach (var item in document.RootElement.EnumerateArray())
+ {
+ array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item));
+ }
+ value = array;
+ return Response.FromValue(value, response);
+ }
+
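A usage sketch for the convenience pair above, assuming an existing FaceClient (client), a faceId from a prior detection, and a pre-populated placeholder list id; the FaceFindSimilarResult property names are inferred from the deserializer the method calls:

Guid queryFaceId = detected.Value[0].FaceId.Value; // from a Detect call with returnFaceId: true
Response<IReadOnlyList<FaceFindSimilarResult>> similar = await client.FindSimilarFromLargeFaceListAsync(
    queryFaceId,
    "my-large-face-list",                 // placeholder list id
    maxNumOfCandidatesReturned: 5,
    mode: FindSimilarMatchMode.MatchPerson);
foreach (FaceFindSimilarResult match in similar.Value)
{
    // persistedFaceId identifies the stored face inside the Large Face List
    Console.WriteLine($"{match.PersistedFaceId}: confidence {match.Confidence:F2}");
}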
+ ///
+ /// [Protocol Method] Given a query face's faceId, search for similar-looking faces in a Large Face List. A 'largeFaceListId' is created by "Create Large Face List".
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ ///
+ public virtual async Task<Response> FindSimilarFromLargeFaceListAsync(RequestContent content, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context);
+ return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ ///
+ /// [Protocol Method] Given a query face's faceId, search for similar-looking faces in a Large Face List. A 'largeFaceListId' is created by "Create Large Face List".
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ ///
+ public virtual Response FindSimilarFromLargeFaceList(RequestContent content, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context);
+ return _pipeline.ProcessMessage(message, context);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ /// 1-to-many identification to find the closest matches of a specific query face in a Large Person Group.
+ /// Array of faceIds of the query faces, created by "Detect". Each face is identified independently. The valid number of faceIds is between [1, 10].
+ /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". The parameters personGroupId and largePersonGroupId should not be provided at the same time.
+ /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.
+ /// Customized identification confidence threshold, in the range of [0, 1]. Advanced users can tweak this value to override the default internal threshold for better precision on their scenario data. Note there is no guarantee that this threshold value will work on other data or after algorithm updates.
+ /// The cancellation token to use.
+ /// or is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/identify-from-large-person-group for more details.
+ ///
+ public virtual async Task<Response<IReadOnlyList<FaceIdentificationResult>>> IdentifyFromLargePersonGroupAsync(IEnumerable<Guid> faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? confidenceThreshold = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(faceIds, nameof(faceIds));
+ Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId));
+
+ IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = await IdentifyFromLargePersonGroupAsync(identifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false);
+ IReadOnlyList<FaceIdentificationResult> value = default;
+ using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
+ List<FaceIdentificationResult> array = new List<FaceIdentificationResult>();
+ foreach (var item in document.RootElement.EnumerateArray())
+ {
+ array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item));
+ }
+ value = array;
+ return Response.FromValue(value, response);
+ }
+
+ /// 1-to-many identification to find the closest matches of a specific query face in a Large Person Group.
+ /// Array of faceIds of the query faces, created by "Detect". Each face is identified independently. The valid number of faceIds is between [1, 10].
+ /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". The parameters personGroupId and largePersonGroupId should not be provided at the same time.
+ /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.
+ /// Customized identification confidence threshold, in the range of [0, 1]. Advanced users can tweak this value to override the default internal threshold for better precision on their scenario data. Note there is no guarantee that this threshold value will work on other data or after algorithm updates.
+ /// The cancellation token to use.
+ /// or is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/identify-from-large-person-group for more details.
+ ///
+ public virtual Response<IReadOnlyList<FaceIdentificationResult>> IdentifyFromLargePersonGroup(IEnumerable<Guid> faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? confidenceThreshold = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(faceIds, nameof(faceIds));
+ Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId));
+
+ IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = IdentifyFromLargePersonGroup(identifyFromLargePersonGroupRequest.ToRequestContent(), context);
+ IReadOnlyList<FaceIdentificationResult> value = default;
+ using var document = JsonDocument.Parse(response.ContentStream);
+ List<FaceIdentificationResult> array = new List<FaceIdentificationResult>();
+ foreach (var item in document.RootElement.EnumerateArray())
+ {
+ array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item));
+ }
+ value = array;
+ return Response.FromValue(value, response);
+ }
+
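A sketch of the identify pair above under the same assumptions (existing client, a detected queryFaceId, and a trained Large Person Group with a placeholder id):

Response<IReadOnlyList<FaceIdentificationResult>> identified = await client.IdentifyFromLargePersonGroupAsync(
    new[] { queryFaceId },                // 1 to 10 query faceIds
    "my-large-person-group",              // placeholder group id
    maxNumOfCandidatesReturned: 3,
    confidenceThreshold: 0.7f);
foreach (FaceIdentificationResult result in identified.Value)
{
    foreach (FaceIdentificationCandidate candidate in result.Candidates)
    {
        Console.WriteLine($"face {result.FaceId} -> person {candidate.PersonId} ({candidate.Confidence:F2})");
    }
}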
+ ///
+ /// [Protocol Method] 1-to-many identification to find the closest matches of a specific query face in a Large Person Group.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ ///
+ public virtual async Task<Response> IdentifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context);
+ return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ ///
+ /// [Protocol Method] 1-to-many identification to find the closest matches of a specific query face in a Large Person Group.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ ///
+ public virtual Response IdentifyFromLargePersonGroup(RequestContent content, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context);
+ return _pipeline.ProcessMessage(message, context);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ /// Verify whether a face belongs to a person in a Large Person Group.
+ /// The faceId of the face, which comes from "Detect".
+ /// Use an existing largePersonGroupId and personId for fast loading of a specified person. largePersonGroupId is created in "Create Large Person Group".
+ /// Specify a certain person in the Large Person Group.
+ /// The cancellation token to use.
+ /// is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-from-large-person-group for more details.
+ ///
+ public virtual async Task<Response<FaceVerificationResult>> VerifyFromLargePersonGroupAsync(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId));
+
+ VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = await VerifyFromLargePersonGroupAsync(verifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false);
+ return Response.FromValue(FaceVerificationResult.FromResponse(response), response);
+ }
+
+ /// Verify whether a face belongs to a person in a Large Person Group.
+ /// The faceId of the face, which comes from "Detect".
+ /// Use an existing largePersonGroupId and personId for fast loading of a specified person. largePersonGroupId is created in "Create Large Person Group".
+ /// Specify a certain person in the Large Person Group.
+ /// The cancellation token to use.
+ /// is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-from-large-person-group for more details.
+ ///
+ public virtual Response<FaceVerificationResult> VerifyFromLargePersonGroup(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId));
+
+ VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = VerifyFromLargePersonGroup(verifyFromLargePersonGroupRequest.ToRequestContent(), context);
+ return Response.FromValue(FaceVerificationResult.FromResponse(response), response);
+ }
+
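And the verify pair, assuming personId refers to a known person in the same placeholder group; FaceVerificationResult's IsIdentical/Confidence members are inferred from the Face verification contract:

Response<FaceVerificationResult> verification = await client.VerifyFromLargePersonGroupAsync(
    queryFaceId,
    "my-large-person-group",
    personId);
Console.WriteLine(verification.Value.IsIdentical
    ? $"Same person (confidence {verification.Value.Confidence:F2})"
    : "Not the same person");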
+ ///
+ /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ ///
+ public virtual async Task<Response> VerifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context);
+ return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ ///
+ /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ ///
+ public virtual Response VerifyFromLargePersonGroup(RequestContent content, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context);
+ return _pipeline.ProcessMessage(message, context);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
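For the protocol overloads, callers build the JSON body themselves. A sketch using Azure.Core's RequestContent.Create with an anonymous object whose property names mirror the request models in this diff (the exact wire shape is an assumption drawn from the convenience parameters):

RequestContent body = RequestContent.Create(new
{
    faceId = queryFaceId,
    largeFaceListId = "my-large-face-list",
    maxNumOfCandidatesReturned = 5,
    mode = "matchPerson",
});
Response raw = await client.FindSimilarFromLargeFaceListAsync(body, new RequestContext());
Console.WriteLine(raw.Content.ToString()); // raw JSON array from the service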
internal HttpMessage CreateDetectFromUrlImplRequest(RequestContent content, string detectionModel, string recognitionModel, bool? returnFaceId, IEnumerable returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? faceIdTimeToLive, RequestContext context)
{
var message = _pipeline.CreateMessage(context, ResponseClassifier200);
@@ -992,6 +1272,57 @@ internal HttpMessage CreateGroupRequest(RequestContent content, RequestContext c
return message;
}
+ internal HttpMessage CreateFindSimilarFromLargeFaceListRequest(RequestContent content, RequestContext context)
+ {
+ var message = _pipeline.CreateMessage(context, ResponseClassifier200);
+ var request = message.Request;
+ request.Method = RequestMethod.Post;
+ var uri = new RawRequestUriBuilder();
+ uri.Reset(_endpoint);
+ uri.AppendRaw("/face/", false);
+ uri.AppendRaw(_apiVersion, true);
+ uri.AppendPath("/findsimilars", false);
+ request.Uri = uri;
+ request.Headers.Add("Accept", "application/json");
+ request.Headers.Add("Content-Type", "application/json");
+ request.Content = content;
+ return message;
+ }
+
+ internal HttpMessage CreateIdentifyFromLargePersonGroupRequest(RequestContent content, RequestContext context)
+ {
+ var message = _pipeline.CreateMessage(context, ResponseClassifier200);
+ var request = message.Request;
+ request.Method = RequestMethod.Post;
+ var uri = new RawRequestUriBuilder();
+ uri.Reset(_endpoint);
+ uri.AppendRaw("/face/", false);
+ uri.AppendRaw(_apiVersion, true);
+ uri.AppendPath("/identify", false);
+ request.Uri = uri;
+ request.Headers.Add("Accept", "application/json");
+ request.Headers.Add("Content-Type", "application/json");
+ request.Content = content;
+ return message;
+ }
+
+ internal HttpMessage CreateVerifyFromLargePersonGroupRequest(RequestContent content, RequestContext context)
+ {
+ var message = _pipeline.CreateMessage(context, ResponseClassifier200);
+ var request = message.Request;
+ request.Method = RequestMethod.Post;
+ var uri = new RawRequestUriBuilder();
+ uri.Reset(_endpoint);
+ uri.AppendRaw("/face/", false);
+ uri.AppendRaw(_apiVersion, true);
+ uri.AppendPath("/verify", false);
+ request.Uri = uri;
+ request.Headers.Add("Accept", "application/json");
+ request.Headers.Add("Content-Type", "application/json");
+ request.Content = content;
+ return message;
+ }
+
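Note that all three request factories post to version-prefixed paths under /face/{apiVersion}. The RequestContext threaded through them can also reshape error handling per call; for example, Azure.Core's response classifiers can mark a status code as non-error so the protocol method returns instead of throwing (body here reuses the sketch above):

var context = new RequestContext { CancellationToken = cancellationToken };
context.AddClassifier(404, isError: false); // illustrative: surface 404 as a normal response
Response response = await client.VerifyFromLargePersonGroupAsync(body, context);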
private static RequestContext DefaultRequestContext = new RequestContext();
internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default)
{
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs
new file mode 100644
index 000000000000..3d0dcf5633df
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs
@@ -0,0 +1,143 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ public partial class FaceIdentificationCandidate : IUtf8JsonSerializable, IJsonModel<FaceIdentificationCandidate>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<FaceIdentificationCandidate>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<FaceIdentificationCandidate>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationCandidate>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("personId"u8);
+ writer.WriteStringValue(PersonId);
+ writer.WritePropertyName("confidence"u8);
+ writer.WriteNumberValue(Confidence);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ FaceIdentificationCandidate IJsonModel<FaceIdentificationCandidate>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationCandidate>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeFaceIdentificationCandidate(document.RootElement, options);
+ }
+
+ internal static FaceIdentificationCandidate DeserializeFaceIdentificationCandidate(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ Guid personId = default;
+ float confidence = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("personId"u8))
+ {
+ personId = property.Value.GetGuid();
+ continue;
+ }
+ if (property.NameEquals("confidence"u8))
+ {
+ confidence = property.Value.GetSingle();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<FaceIdentificationCandidate>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationCandidate>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ FaceIdentificationCandidate IPersistableModel<FaceIdentificationCandidate>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationCandidate>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeFaceIdentificationCandidate(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<FaceIdentificationCandidate>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static FaceIdentificationCandidate FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeFaceIdentificationCandidate(document.RootElement);
+ }
+
+ /// Convert into a .
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs
new file mode 100644
index 000000000000..b4fcaba1400b
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// Candidate for identify call.
+ public partial class FaceIdentificationCandidate
+ {
+ ///
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use .
+ ///
+ ///
+ /// To assign an already formatted json string to this property use .
+ ///
+ ///
+ /// Examples:
+ ///
+ /// -
+ /// BinaryData.FromObjectAsJson("foo")
+ /// Creates a payload of "foo".
+ ///
+ /// -
+ /// BinaryData.FromString("\"foo\"")
+ /// Creates a payload of "foo".
+ ///
+ /// -
+ /// BinaryData.FromObjectAsJson(new { key = "value" })
+ /// Creates a payload of { "key": "value" }.
+ ///
+ /// -
+ /// BinaryData.FromString("{\"key\": \"value\"}")
+ /// Creates a payload of { "key": "value" }.
+ ///
+ ///
+ ///
+ ///
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of .
+ /// The personId of the candidate person.
+ /// Confidence value of the candidate. The higher the confidence, the more similar. Range between [0, 1].
+ internal FaceIdentificationCandidate(Guid personId, float confidence)
+ {
+ PersonId = personId;
+ Confidence = confidence;
+ }
+
+ /// Initializes a new instance of .
+ /// The personId of the candidate person.
+ /// Confidence value of the candidate. The higher the confidence, the more similar. Range between [0, 1].
+ /// Keeps track of any properties unknown to the library.
+ internal FaceIdentificationCandidate(Guid personId, float confidence, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ PersonId = personId;
+ Confidence = confidence;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of for deserialization.
+ internal FaceIdentificationCandidate()
+ {
+ }
+
+ /// The personId of the candidate person.
+ public Guid PersonId { get; }
+ /// Confidence value of the candidate. The higher the confidence, the more similar. Range between [0, 1].
+ public float Confidence { get; }
+ }
+}
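Since Candidates are returned ranked by confidence (see FaceIdentificationResult below), a common pattern is to accept only the top candidate above an application-chosen threshold. Reusing result from the identify sketch earlier; the 0.8 cutoff is illustrative, not a service default:

using System.Linq;

FaceIdentificationCandidate best = result.Candidates.FirstOrDefault();
if (best is not null && best.Confidence >= 0.8f)
{
    Console.WriteLine($"Accepting person {best.PersonId} at confidence {best.Confidence:F2}");
}
else
{
    Console.WriteLine("No candidate met the acceptance threshold.");
}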
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs
new file mode 100644
index 000000000000..edee38653822
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs
@@ -0,0 +1,153 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ public partial class FaceIdentificationResult : IUtf8JsonSerializable, IJsonModel<FaceIdentificationResult>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<FaceIdentificationResult>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<FaceIdentificationResult>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("faceId"u8);
+ writer.WriteStringValue(FaceId);
+ writer.WritePropertyName("candidates"u8);
+ writer.WriteStartArray();
+ foreach (var item in Candidates)
+ {
+ writer.WriteObjectValue(item, options);
+ }
+ writer.WriteEndArray();
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ FaceIdentificationResult IJsonModel<FaceIdentificationResult>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeFaceIdentificationResult(document.RootElement, options);
+ }
+
+ internal static FaceIdentificationResult DeserializeFaceIdentificationResult(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ Guid faceId = default;
+ IReadOnlyList<FaceIdentificationCandidate> candidates = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("faceId"u8))
+ {
+ faceId = property.Value.GetGuid();
+ continue;
+ }
+ if (property.NameEquals("candidates"u8))
+ {
+ List<FaceIdentificationCandidate> array = new List<FaceIdentificationCandidate>();
+ foreach (var item in property.Value.EnumerateArray())
+ {
+ array.Add(FaceIdentificationCandidate.DeserializeFaceIdentificationCandidate(item, options));
+ }
+ candidates = array;
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new FaceIdentificationResult(faceId, candidates, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<FaceIdentificationResult>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ FaceIdentificationResult IPersistableModel<FaceIdentificationResult>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceIdentificationResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeFaceIdentificationResult(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<FaceIdentificationResult>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static FaceIdentificationResult FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeFaceIdentificationResult(document.RootElement);
+ }
+
+ /// Convert into a .
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
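Because the model implements IPersistableModel, it can also be round-tripped through System.ClientModel's ModelReaderWriter in the JSON ("J") format shown above, which is occasionally useful for caching or logging results:

using System.ClientModel.Primitives;

BinaryData json = ModelReaderWriter.Write(result);  // serializes via the "J" path above
FaceIdentificationResult roundTripped = ModelReaderWriter.Read<FaceIdentificationResult>(json);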
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs
new file mode 100644
index 000000000000..1489f5061145
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs
@@ -0,0 +1,82 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+//
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Azure.AI.Vision.Face
+{
+ /// Identify result.
+ public partial class FaceIdentificationResult
+ {
+ /// <summary>
+ /// Keeps track of any properties unknown to the library.
+ /// <para>
+ /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+ /// </para>
+ /// <para>
+ /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+ /// </para>
+ /// <para>
+ /// Examples:
+ /// <list type="bullet">
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson("foo")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("\"foo\"")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// </list>
+ /// </para>
+ /// </summary>
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// <summary> Initializes a new instance of <see cref="FaceIdentificationResult"/>. </summary>
+ /// <param name="faceId"> faceId of the query face. </param>
+ /// <param name="candidates"> Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. </param>
+ /// <exception cref="ArgumentNullException"> <paramref name="candidates"/> is null. </exception>
+ internal FaceIdentificationResult(Guid faceId, IEnumerable<FaceIdentificationCandidate> candidates)
+ {
+ Argument.AssertNotNull(candidates, nameof(candidates));
+
+ FaceId = faceId;
+ Candidates = candidates.ToList();
+ }
+
+ /// <summary> Initializes a new instance of <see cref="FaceIdentificationResult"/>. </summary>
+ /// <param name="faceId"> faceId of the query face. </param>
+ /// <param name="candidates"> Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. </param>
+ /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+ internal FaceIdentificationResult(Guid faceId, IReadOnlyList<FaceIdentificationCandidate> candidates, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ FaceId = faceId;
+ Candidates = candidates;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="FaceIdentificationResult"/> for deserialization. </summary>
+ internal FaceIdentificationResult()
+ {
+ }
+
+ /// <summary> faceId of the query face. </summary>
+ public Guid FaceId { get; }
+ /// <summary> Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. </summary>
+ public IReadOnlyList<FaceIdentificationCandidate> Candidates { get; }
+ }
+}
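
A hedged consumption sketch for this read-only model: it assumes a FaceClient convenience method named IdentifyFromLargePersonGroupAsync and that FaceIdentificationCandidate exposes PersonId and Confidence, which matches the identify REST contract but should be verified against the released client surface.

    using System;
    using System.Collections.Generic;
    using System.Threading.Tasks;
    using Azure;
    using Azure.AI.Vision.Face;

    static async Task PrintIdentificationsAsync(FaceClient faceClient, IEnumerable<Guid> detectedFaceIds)
    {
        // detectedFaceIds come from a prior detect call with returnFaceId = true.
        Response<IReadOnlyList<FaceIdentificationResult>> response =
            await faceClient.IdentifyFromLargePersonGroupAsync(detectedFaceIds, "my-large-person-group");

        foreach (FaceIdentificationResult identification in response.Value)
        {
            // Candidates is empty when no enrolled person matches the query face.
            foreach (FaceIdentificationCandidate candidate in identification.Candidates)
            {
                Console.WriteLine($"Face {identification.FaceId} -> person {candidate.PersonId} (confidence {candidate.Confidence})");
            }
        }
    }
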
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs
new file mode 100644
index 000000000000..67683c753b30
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs
@@ -0,0 +1,57 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ComponentModel;
+
+namespace Azure.AI.Vision.Face
+{
+ /// <summary> The status of a long-running operation. </summary>
+ public readonly partial struct FaceOperationStatus : IEquatable<FaceOperationStatus>
+ {
+ private readonly string _value;
+
+ /// <summary> Initializes a new instance of <see cref="FaceOperationStatus"/>. </summary>
+ /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
+ public FaceOperationStatus(string value)
+ {
+ _value = value ?? throw new ArgumentNullException(nameof(value));
+ }
+
+ private const string NotStartedValue = "notStarted";
+ private const string RunningValue = "running";
+ private const string SucceededValue = "succeeded";
+ private const string FailedValue = "failed";
+
+ /// <summary> The operation has not started. </summary>
+ public static FaceOperationStatus NotStarted { get; } = new FaceOperationStatus(NotStartedValue);
+ /// <summary> The operation is still running. </summary>
+ public static FaceOperationStatus Running { get; } = new FaceOperationStatus(RunningValue);
+ /// <summary> The operation has succeeded. </summary>
+ public static FaceOperationStatus Succeeded { get; } = new FaceOperationStatus(SucceededValue);
+ /// <summary> The operation has failed. </summary>
+ public static FaceOperationStatus Failed { get; } = new FaceOperationStatus(FailedValue);
+ /// <summary> Determines if two <see cref="FaceOperationStatus"/> values are the same. </summary>
+ public static bool operator ==(FaceOperationStatus left, FaceOperationStatus right) => left.Equals(right);
+ /// <summary> Determines if two <see cref="FaceOperationStatus"/> values are not the same. </summary>
+ public static bool operator !=(FaceOperationStatus left, FaceOperationStatus right) => !left.Equals(right);
+ /// <summary> Converts a <see cref="string"/> to a <see cref="FaceOperationStatus"/>. </summary>
+ public static implicit operator FaceOperationStatus(string value) => new FaceOperationStatus(value);
+
+ /// <inheritdoc/>
+ [EditorBrowsable(EditorBrowsableState.Never)]
+ public override bool Equals(object obj) => obj is FaceOperationStatus other && Equals(other);
+ /// <inheritdoc/>
+ public bool Equals(FaceOperationStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);
+
+ /// <inheritdoc/>
+ [EditorBrowsable(EditorBrowsableState.Never)]
+ public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0;
+ /// <inheritdoc/>
+ public override string ToString() => _value;
+ }
+}
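
Because equality above is case-insensitive string comparison, callers can match the known statuses while unknown future values simply flow through. A minimal polling sketch against the FaceTrainingResult model added later in this diff (getTrainingStatus is a hypothetical delegate standing in for a get-training-status client call):

    using System;
    using System.Threading.Tasks;
    using Azure.AI.Vision.Face;

    static async Task<FaceTrainingResult> WaitForTrainingAsync(Func<Task<FaceTrainingResult>> getTrainingStatus)
    {
        while (true)
        {
            FaceTrainingResult result = await getTrainingStatus();

            if (result.Status == FaceOperationStatus.Succeeded)
            {
                return result;
            }
            if (result.Status == FaceOperationStatus.Failed)
            {
                // Message is only populated when training failed.
                throw new InvalidOperationException(result.Message ?? "Training failed.");
            }

            // notStarted, running, or a status this library version does not know yet.
            await Task.Delay(TimeSpan.FromSeconds(1));
        }
    }
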
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs
index 4019a5bc3493..f3f4c70329ee 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs
@@ -106,18 +106,7 @@ public FaceSessionClient(Uri endpoint, TokenCredential credential, AzureAIVision
/// Body parameter.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="body"/> is null. </exception>
- ///
- /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
- ///
- /// Permissions includes...
- /// >
- /// *
- /// * Ability to call /detectLiveness/singleModal for up to 3 retries.
- /// * A token lifetime of 10 minutes.
- ///
- /// > [!NOTE]
- /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-session for more details.
///
public virtual async Task> CreateLivenessSessionAsync(CreateLivenessSessionContent body, CancellationToken cancellationToken = default)
{
@@ -133,18 +122,7 @@ public virtual async Task> CreateLivenessS
/// Body parameter.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="body"/> is null. </exception>
- ///
- /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
- ///
- /// Permissions includes...
- /// >
- /// *
- /// * Ability to call /detectLiveness/singleModal for up to 3 retries.
- /// * A token lifetime of 10 minutes.
- ///
- /// > [!NOTE]
- /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-session for more details.
///
public virtual Response CreateLivenessSession(CreateLivenessSessionContent body, CancellationToken cancellationToken = default)
{
@@ -306,7 +284,7 @@ public virtual Response DeleteLivenessSession(string sessionId, RequestContext c
}
}
- /// Get session result of detectLiveness/singleModal call.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details.
/// The unique ID to reference this session.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
@@ -321,7 +299,7 @@ public virtual async Task> GetLivenessSessionResultAsy
return Response.FromValue(LivenessSession.FromResponse(response), response);
}
- /// Get session result of detectLiveness/singleModal call.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details.
/// The unique ID to reference this session.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
@@ -337,7 +315,7 @@ public virtual Response GetLivenessSessionResult(string session
}
///
- /// [Protocol Method] Get session result of detectLiveness/singleModal call.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details.
///
/// -
///
@@ -377,7 +355,7 @@ public virtual async Task GetLivenessSessionResultAsync(string session
}
///
- /// [Protocol Method] Get session result of detectLiveness/singleModal call.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details.
///
/// -
///
@@ -420,11 +398,7 @@ public virtual Response GetLivenessSessionResult(string sessionId, RequestContex
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
/// The cancellation token to use.
- ///
- /// List sessions from the last sessionId greater than the 'start'.
- ///
- /// The result should be ordered by sessionId in ascending order.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-sessions for more details.
///
public virtual async Task>> GetLivenessSessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
@@ -445,11 +419,7 @@ public virtual async Task>> GetLiven
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
/// The cancellation token to use.
- ///
- /// List sessions from the last sessionId greater than the 'start'.
- ///
- /// The result should be ordered by sessionId in ascending order.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-sessions for more details.
///
public virtual Response> GetLivenessSessions(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
@@ -540,7 +510,7 @@ public virtual Response GetLivenessSessions(string start, int? top, RequestConte
}
}
- /// Gets session requests and response body for the session.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details.
/// The unique ID to reference this session.
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
@@ -565,7 +535,7 @@ public virtual async Task>> Ge
return Response.FromValue(value, response);
}
- /// Gets session requests and response body for the session.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details.
/// The unique ID to reference this session.
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
@@ -591,7 +561,7 @@ public virtual Response> GetLivenessSes
}
///
- /// [Protocol Method] Gets session requests and response body for the session.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details.
///
/// -
///
@@ -633,7 +603,7 @@ public virtual async Task GetLivenessSessionAuditEntriesAsync(string s
}
///
- /// [Protocol Method] Gets session requests and response body for the session.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details.
///
/// -
///
@@ -678,27 +648,8 @@ public virtual Response GetLivenessSessionAuditEntries(string sessionId, string
/// Body parameter.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="body"/> is null. </exception>
- ///
- /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
- ///
- /// Permissions includes...
- /// >
- /// *
- /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
- /// * A token lifetime of 10 minutes.
- ///
- /// > [!NOTE]
- /// >
- /// > *
- /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
- /// > * To retrieve a result, use the Get Liveness With Verify Session.
- /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
- ///
- /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.
- /// > [!NOTE]
- /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage.
- ///
- internal virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessSessionContent body, CancellationToken cancellationToken = default)
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session for more details.
+ internal virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(body, nameof(body));
@@ -712,27 +663,8 @@ internal virtual async Task> Cre
/// Body parameter.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="body"/> is null. </exception>
- ///
- /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
- ///
- /// Permissions includes...
- /// >
- /// *
- /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
- /// * A token lifetime of 10 minutes.
- ///
- /// > [!NOTE]
- /// >
- /// > *
- /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
- /// > * To retrieve a result, use the Get Liveness With Verify Session.
- /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
- ///
- /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.
- /// > [!NOTE]
- /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage.
- ///
- internal virtual Response CreateLivenessWithVerifySession(CreateLivenessSessionContent body, CancellationToken cancellationToken = default)
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session for more details.
+ internal virtual Response CreateLivenessWithVerifySession(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(body, nameof(body));
@@ -752,7 +684,7 @@ internal virtual Response CreateLivenessW
///
/// -
///
- /// Please try the simpler convenience overload with strongly typed models first.
+ /// Please try the simpler convenience overload with strongly typed models first.
///
///
///
@@ -790,7 +722,7 @@ internal virtual async Task CreateLivenessWithVerifySessionAsync(Reque
///
/// -
///
- /// Please try the simpler convenience overload with strongly typed models first.
+ /// Please try the simpler convenience overload with strongly typed models first.
///
///
///
@@ -822,25 +754,8 @@ internal virtual Response CreateLivenessWithVerifySession(RequestContent content
/// Request content of liveness with verify session creation.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="body"/> is null. </exception>
- ///
- /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
- ///
- /// Permissions includes...
- /// >
- /// *
- /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
- /// * A token lifetime of 10 minutes.
- ///
- /// > [!NOTE]
- /// >
- /// > *
- /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
- /// > * To retrieve a result, use the Get Liveness With Verify Session.
- /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
- ///
- /// Recommended Option: VerifyImage is provided during session creation.
- ///
- internal virtual async Task> CreateLivenessWithVerifySessionWithVerifyImageAsync(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default)
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details.
+ internal virtual async Task> CreateLivenessWithVerifySessionWithVerifyImageAsync(CreateLivenessWithVerifySessionMultipartContent body, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(body, nameof(body));
@@ -854,25 +769,8 @@ internal virtual async Task> Cre
/// Request content of liveness with verify session creation.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="body"/> is null. </exception>
- ///
- /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
- ///
- /// Permissions includes...
- /// >
- /// *
- /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
- /// * A token lifetime of 10 minutes.
- ///
- /// > [!NOTE]
- /// >
- /// > *
- /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
- /// > * To retrieve a result, use the Get Liveness With Verify Session.
- /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
- ///
- /// Recommended Option: VerifyImage is provided during session creation.
- ///
- internal virtual Response CreateLivenessWithVerifySessionWithVerifyImage(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default)
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details.
+ internal virtual Response CreateLivenessWithVerifySessionWithVerifyImage(CreateLivenessWithVerifySessionMultipartContent body, CancellationToken cancellationToken = default)
{
Argument.AssertNotNull(body, nameof(body));
@@ -892,7 +790,7 @@ internal virtual Response CreateLivenessW
///
/// -
///
- /// Please try the simpler convenience overload with strongly typed models first.
+ /// Please try the simpler convenience overload with strongly typed models first.
///
///
///
@@ -931,7 +829,7 @@ internal virtual async Task CreateLivenessWithVerifySessionWithVerifyI
///
/// -
///
- /// Please try the simpler convenience overload with strongly typed models first.
+ /// Please try the simpler convenience overload with strongly typed models first.
///
///
///
@@ -1032,7 +930,7 @@ public virtual Response DeleteLivenessWithVerifySession(string sessionId, Reques
}
}
- /// Get session result of detectLivenessWithVerify/singleModal call.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details.
/// The unique ID to reference this session.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
@@ -1047,7 +945,7 @@ public virtual async Task> GetLivenessWithVe
return Response.FromValue(LivenessWithVerifySession.FromResponse(response), response);
}
- /// Get session result of detectLivenessWithVerify/singleModal call.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details.
/// The unique ID to reference this session.
/// The cancellation token to use.
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
@@ -1063,7 +961,7 @@ public virtual Response GetLivenessWithVerifySessionR
}
///
- /// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details.
///
/// -
///
@@ -1103,7 +1001,7 @@ public virtual async Task GetLivenessWithVerifySessionResultAsync(stri
}
///
- /// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details.
///
/// -
///
@@ -1146,11 +1044,7 @@ public virtual Response GetLivenessWithVerifySessionResult(string sessionId, Req
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
/// The cancellation token to use.
- ///
- /// List sessions from the last sessionId greater than the "start".
- ///
- /// The result should be ordered by sessionId in ascending order.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-sessions for more details.
///
public virtual async Task>> GetLivenessWithVerifySessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
@@ -1171,11 +1065,7 @@ public virtual async Task>> GetLiven
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
/// The cancellation token to use.
- ///
- /// List sessions from the last sessionId greater than the "start".
- ///
- /// The result should be ordered by sessionId in ascending order.
- ///
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-sessions for more details.
///
public virtual Response> GetLivenessWithVerifySessions(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
@@ -1266,7 +1156,7 @@ public virtual Response GetLivenessWithVerifySessions(string start, int? top, Re
}
}
- /// Gets session requests and response body for the session.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details.
/// The unique ID to reference this session.
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
@@ -1291,7 +1181,7 @@ public virtual async Task>> Ge
return Response.FromValue(value, response);
}
- /// Gets session requests and response body for the session.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details.
/// The unique ID to reference this session.
/// List resources greater than the "start". It contains no more than 64 characters. Default is empty.
/// The number of items to list, ranging in [1, 1000]. Default is 1000.
@@ -1317,7 +1207,7 @@ public virtual Response> GetLivenessWit
}
///
- /// [Protocol Method] Gets session requests and response body for the session.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details.
///
/// -
///
@@ -1359,7 +1249,7 @@ public virtual async Task GetLivenessWithVerifySessionAuditEntriesAsyn
}
///
- /// [Protocol Method] Gets session requests and response body for the session.
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details.
///
/// -
///
@@ -1400,6 +1290,270 @@ public virtual Response GetLivenessWithVerifySessionAuditEntries(string sessionI
}
}
+ /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.
+ /// Id of session image.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
+ /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+ /// Return faceIds of the detected faces or not. The default value is true.
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
+ /// Return face landmarks of the detected faces or not. The default value is false.
+ /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.
+ /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).
+ /// The cancellation token to use.
+ /// <exception cref="ArgumentNullException"> <paramref name="sessionImageId"/> is null. </exception>
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-session-image-id for more details.
+ ///
+ public virtual async Task<Response<IReadOnlyList<FaceDetectionResult>>> DetectFromSessionImageAsync(string sessionImageId, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable<FaceAttributeType> returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(sessionImageId, nameof(sessionImageId));
+
+ DetectFromSessionImageRequest detectFromSessionImageRequest = new DetectFromSessionImageRequest(sessionImageId, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = await DetectFromSessionImageAsync(detectFromSessionImageRequest.ToRequestContent(), detectionModel?.ToString(), recognitionModel?.ToString(), returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context).ConfigureAwait(false);
+ IReadOnlyList<FaceDetectionResult> value = default;
+ using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
+ List<FaceDetectionResult> array = new List<FaceDetectionResult>();
+ foreach (var item in document.RootElement.EnumerateArray())
+ {
+ array.Add(FaceDetectionResult.DeserializeFaceDetectionResult(item));
+ }
+ value = array;
+ return Response.FromValue(value, response);
+ }
+
+ /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.
+ /// Id of session image.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
+ /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
+ /// Return faceIds of the detected faces or not. The default value is true.
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
+ /// Return face landmarks of the detected faces or not. The default value is false.
+ /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.
+ /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).
+ /// The cancellation token to use.
+ /// <exception cref="ArgumentNullException"> <paramref name="sessionImageId"/> is null. </exception>
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-session-image-id for more details.
+ ///
+ public virtual Response<IReadOnlyList<FaceDetectionResult>> DetectFromSessionImage(string sessionImageId, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable<FaceAttributeType> returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(sessionImageId, nameof(sessionImageId));
+
+ DetectFromSessionImageRequest detectFromSessionImageRequest = new DetectFromSessionImageRequest(sessionImageId, null);
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = DetectFromSessionImage(detectFromSessionImageRequest.ToRequestContent(), detectionModel?.ToString(), recognitionModel?.ToString(), returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context);
+ IReadOnlyList<FaceDetectionResult> value = default;
+ using var document = JsonDocument.Parse(response.ContentStream);
+ List<FaceDetectionResult> array = new List<FaceDetectionResult>();
+ foreach (var item in document.RootElement.EnumerateArray())
+ {
+ array.Add(FaceDetectionResult.DeserializeFaceDetectionResult(item));
+ }
+ value = array;
+ return Response.FromValue(value, response);
+ }
+
+ ///
+ /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03".
+ /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04".
+ /// Return faceIds of the detected faces or not. The default value is true.
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
+ /// Return face landmarks of the detected faces or not. The default value is false.
+ /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.
+ /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
+ /// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
+ /// <returns> The response returned from the service. </returns>
+ ///
+ public virtual async Task<Response> DetectFromSessionImageAsync(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable<string> returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DetectFromSessionImage");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateDetectFromSessionImageRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context);
+ return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ ///
+ /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The content to send as the body of the request.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03".
+ /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04".
+ /// Return faceIds of the detected faces or not. The default value is true.
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
+ /// Return face landmarks of the detected faces or not. The default value is false.
+ /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.
+ /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
+ /// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
+ /// <returns> The response returned from the service. </returns>
+ ///
+ public virtual Response DetectFromSessionImage(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable<string> returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DetectFromSessionImage");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateDetectFromSessionImageRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context);
+ return _pipeline.ProcessMessage(message, context);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details.
+ /// The request ID of the image to be retrieved.
+ /// The cancellation token to use.
+ /// <exception cref="ArgumentNullException"> <paramref name="sessionImageId"/> is null. </exception>
+ /// <exception cref="ArgumentException"> <paramref name="sessionImageId"/> is an empty string, and was expected to be non-empty. </exception>
+ ///
+ public virtual async Task<Response<BinaryData>> GetSessionImageAsync(string sessionImageId, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId));
+
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = await GetSessionImageAsync(sessionImageId, context).ConfigureAwait(false);
+ return Response.FromValue(response.Content, response);
+ }
+
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details.
+ /// The request ID of the image to be retrieved.
+ /// The cancellation token to use.
+ /// <exception cref="ArgumentNullException"> <paramref name="sessionImageId"/> is null. </exception>
+ /// <exception cref="ArgumentException"> <paramref name="sessionImageId"/> is an empty string, and was expected to be non-empty. </exception>
+ ///
+ public virtual Response<BinaryData> GetSessionImage(string sessionImageId, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId));
+
+ RequestContext context = FromCancellationToken(cancellationToken);
+ Response response = GetSessionImage(sessionImageId, context);
+ return Response.FromValue(response.Content, response);
+ }
+
+ ///
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The request ID of the image to be retrieved.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// <exception cref="ArgumentNullException"> <paramref name="sessionImageId"/> is null. </exception>
+ /// <exception cref="ArgumentException"> <paramref name="sessionImageId"/> is an empty string, and was expected to be non-empty. </exception>
+ /// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
+ /// <returns> The response returned from the service. </returns>
+ ///
+ public virtual async Task<Response> GetSessionImageAsync(string sessionImageId, RequestContext context)
+ {
+ Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetSessionImage");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateGetSessionImageRequest(sessionImageId, context);
+ return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
+ ///
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details.
+ ///
+ /// -
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ /// -
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// The request ID of the image to be retrieved.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// <exception cref="ArgumentNullException"> <paramref name="sessionImageId"/> is null. </exception>
+ /// <exception cref="ArgumentException"> <paramref name="sessionImageId"/> is an empty string, and was expected to be non-empty. </exception>
+ /// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
+ /// <returns> The response returned from the service. </returns>
+ ///
+ public virtual Response GetSessionImage(string sessionImageId, RequestContext context)
+ {
+ Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId));
+
+ using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetSessionImage");
+ scope.Start();
+ try
+ {
+ using HttpMessage message = CreateGetSessionImageRequest(sessionImageId, context);
+ return _pipeline.ProcessMessage(message, context);
+ }
+ catch (Exception e)
+ {
+ scope.Failed(e);
+ throw;
+ }
+ }
+
internal HttpMessage CreateCreateLivenessSessionRequest(RequestContent content, RequestContext context)
{
var message = _pipeline.CreateMessage(context, ResponseClassifier200);
@@ -1611,6 +1765,67 @@ internal HttpMessage CreateGetLivenessWithVerifySessionAuditEntriesRequest(strin
return message;
}
+ internal HttpMessage CreateDetectFromSessionImageRequest(RequestContent content, string detectionModel, string recognitionModel, bool? returnFaceId, IEnumerable<string> returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? faceIdTimeToLive, RequestContext context)
+ {
+ var message = _pipeline.CreateMessage(context, ResponseClassifier200);
+ var request = message.Request;
+ request.Method = RequestMethod.Post;
+ var uri = new RawRequestUriBuilder();
+ uri.Reset(_endpoint);
+ uri.AppendRaw("/face/", false);
+ uri.AppendRaw(_apiVersion, true);
+ uri.AppendPath("/detect", false);
+ if (detectionModel != null)
+ {
+ uri.AppendQuery("detectionModel", detectionModel, true);
+ }
+ if (recognitionModel != null)
+ {
+ uri.AppendQuery("recognitionModel", recognitionModel, true);
+ }
+ if (returnFaceId != null)
+ {
+ uri.AppendQuery("returnFaceId", returnFaceId.Value, true);
+ }
+ if (returnFaceAttributes != null && !(returnFaceAttributes is ChangeTrackingList<string> changeTrackingList && changeTrackingList.IsUndefined))
+ {
+ uri.AppendQueryDelimited("returnFaceAttributes", returnFaceAttributes, ",", true);
+ }
+ if (returnFaceLandmarks != null)
+ {
+ uri.AppendQuery("returnFaceLandmarks", returnFaceLandmarks.Value, true);
+ }
+ if (returnRecognitionModel != null)
+ {
+ uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true);
+ }
+ if (faceIdTimeToLive != null)
+ {
+ uri.AppendQuery("faceIdTimeToLive", faceIdTimeToLive.Value, true);
+ }
+ request.Uri = uri;
+ request.Headers.Add("Accept", "application/json");
+ request.Headers.Add("Content-Type", "application/json");
+ request.Content = content;
+ return message;
+ }
+
+ internal HttpMessage CreateGetSessionImageRequest(string sessionImageId, RequestContext context)
+ {
+ var message = _pipeline.CreateMessage(context, ResponseClassifier200);
+ var request = message.Request;
+ request.Method = RequestMethod.Get;
+ var uri = new RawRequestUriBuilder();
+ uri.Reset(_endpoint);
+ uri.AppendRaw("/face/", false);
+ uri.AppendRaw(_apiVersion, true);
+ uri.AppendPath("/session/sessionImages/", false);
+ uri.AppendPath(sessionImageId, true);
+ request.Uri = uri;
+ request.Headers.Add("Accept", "application/octet-stream");
+ return message;
+ }
+
private static RequestContext DefaultRequestContext = new RequestContext();
internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default)
{
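
Together, the new members added to FaceSessionClient above let a service-side app re-run face detection on an image captured during a liveness session and then download that image. A hedged usage sketch (sessionClient is an authenticated FaceSessionClient, the session-image id is assumed to come from a completed session's results, and Detection03/Recognition04 assume the usual generated extensible-enum value names):

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Threading.Tasks;
    using Azure.AI.Vision.Face;

    static async Task InspectSessionImageAsync(FaceSessionClient sessionClient, string sessionImageId)
    {
        IReadOnlyList<FaceDetectionResult> faces =
            (await sessionClient.DetectFromSessionImageAsync(
                sessionImageId,
                detectionModel: FaceDetectionModel.Detection03,
                recognitionModel: FaceRecognitionModel.Recognition04,
                returnFaceId: false)).Value;
        Console.WriteLine($"Detected {faces.Count} face(s) in the session image.");

        // The image itself is served as application/octet-stream.
        BinaryData image = (await sessionClient.GetSessionImageAsync(sessionImageId)).Value;
        File.WriteAllBytes("session-image.jpg", image.ToArray());
    }
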
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs
new file mode 100644
index 000000000000..3baf11bb5f72
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs
@@ -0,0 +1,176 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ public partial class FaceTrainingResult : IUtf8JsonSerializable, IJsonModel<FaceTrainingResult>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<FaceTrainingResult>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<FaceTrainingResult>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceTrainingResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("status"u8);
+ writer.WriteStringValue(Status.ToString());
+ writer.WritePropertyName("createdDateTime"u8);
+ writer.WriteStringValue(CreatedDateTime, "O");
+ writer.WritePropertyName("lastActionDateTime"u8);
+ writer.WriteStringValue(LastActionDateTime, "O");
+ writer.WritePropertyName("lastSuccessfulTrainingDateTime"u8);
+ writer.WriteStringValue(LastSuccessfulTrainingDateTime, "O");
+ if (Optional.IsDefined(Message))
+ {
+ writer.WritePropertyName("message"u8);
+ writer.WriteStringValue(Message);
+ }
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ FaceTrainingResult IJsonModel<FaceTrainingResult>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceTrainingResult>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeFaceTrainingResult(document.RootElement, options);
+ }
+
+ internal static FaceTrainingResult DeserializeFaceTrainingResult(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ FaceOperationStatus status = default;
+ DateTimeOffset createdDateTime = default;
+ DateTimeOffset lastActionDateTime = default;
+ DateTimeOffset lastSuccessfulTrainingDateTime = default;
+ string message = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("status"u8))
+ {
+ status = new FaceOperationStatus(property.Value.GetString());
+ continue;
+ }
+ if (property.NameEquals("createdDateTime"u8))
+ {
+ createdDateTime = property.Value.GetDateTimeOffset("O");
+ continue;
+ }
+ if (property.NameEquals("lastActionDateTime"u8))
+ {
+ lastActionDateTime = property.Value.GetDateTimeOffset("O");
+ continue;
+ }
+ if (property.NameEquals("lastSuccessfulTrainingDateTime"u8))
+ {
+ lastSuccessfulTrainingDateTime = property.Value.GetDateTimeOffset("O");
+ continue;
+ }
+ if (property.NameEquals("message"u8))
+ {
+ message = property.Value.GetString();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new FaceTrainingResult(
+ status,
+ createdDateTime,
+ lastActionDateTime,
+ lastSuccessfulTrainingDateTime,
+ message,
+ serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<FaceTrainingResult>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceTrainingResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ FaceTrainingResult IPersistableModel<FaceTrainingResult>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FaceTrainingResult>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeFaceTrainingResult(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<FaceTrainingResult>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// <summary> Deserializes the model from a raw response. </summary>
+ /// <param name="response"> The response to deserialize the model from. </param>
+ internal static FaceTrainingResult FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeFaceTrainingResult(document.RootElement);
+ }
+
+ /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
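
One behavior worth noting in the serialization above: properties unknown to this library version are captured into the raw-data dictionary on read and written back out in any non-wire format, so they survive a round trip. A small sketch (payload values are illustrative):

    using System;
    using System.ClientModel.Primitives;
    using Azure.AI.Vision.Face;

    // A payload carrying a property this library version does not model.
    BinaryData payload = BinaryData.FromString(
        "{\"status\":\"succeeded\"," +
        "\"createdDateTime\":\"2024-03-05T11:07:28.0000000Z\"," +
        "\"lastActionDateTime\":\"2024-03-05T11:07:28.0000000Z\"," +
        "\"lastSuccessfulTrainingDateTime\":\"2024-03-05T11:07:28.0000000Z\"," +
        "\"newServiceField\":42}");

    FaceTrainingResult model = ModelReaderWriter.Read<FaceTrainingResult>(payload);

    // The default format here is "J", not wire ("W"), so the unknown
    // property is emitted again from the raw-data dictionary.
    BinaryData roundTripped = ModelReaderWriter.Write(model);
    Console.WriteLine(roundTripped.ToString().Contains("newServiceField")); // True
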
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs
new file mode 100644
index 000000000000..79ac7a85a12b
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs
@@ -0,0 +1,94 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// <summary> Training result of a container. </summary>
+ public partial class FaceTrainingResult
+ {
+ /// <summary>
+ /// Keeps track of any properties unknown to the library.
+ /// <para>
+ /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+ /// </para>
+ /// <para>
+ /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+ /// </para>
+ /// <para>
+ /// Examples:
+ /// <list type="bullet">
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson("foo")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("\"foo\"")</term>
+ /// <description>Creates a payload of "foo".</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// <item>
+ /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+ /// <description>Creates a payload of { "key": "value" }.</description>
+ /// </item>
+ /// </list>
+ /// </para>
+ /// </summary>
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// <summary> Initializes a new instance of <see cref="FaceTrainingResult"/>. </summary>
+ /// <param name="status"> Training status of the container. </param>
+ /// <param name="createdDateTime"> A combined UTC date and time string that describes the created time of the person group, large person group or large face list. </param>
+ /// <param name="lastActionDateTime"> A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. </param>
+ /// <param name="lastSuccessfulTrainingDateTime"> A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. </param>
+ internal FaceTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime)
+ {
+ Status = status;
+ CreatedDateTime = createdDateTime;
+ LastActionDateTime = lastActionDateTime;
+ LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="FaceTrainingResult"/>. </summary>
+ /// <param name="status"> Training status of the container. </param>
+ /// <param name="createdDateTime"> A combined UTC date and time string that describes the created time of the person group, large person group or large face list. </param>
+ /// <param name="lastActionDateTime"> A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. </param>
+ /// <param name="lastSuccessfulTrainingDateTime"> A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. </param>
+ /// <param name="message"> Show failure message when training failed (omitted when training succeeds). </param>
+ /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+ internal FaceTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime, string message, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ Status = status;
+ CreatedDateTime = createdDateTime;
+ LastActionDateTime = lastActionDateTime;
+ LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime;
+ Message = message;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// <summary> Initializes a new instance of <see cref="FaceTrainingResult"/> for deserialization. </summary>
+ internal FaceTrainingResult()
+ {
+ }
+
+ /// <summary> Training status of the container. </summary>
+ public FaceOperationStatus Status { get; }
+ /// <summary> A combined UTC date and time string that describes the created time of the person group, large person group or large face list. </summary>
+ public DateTimeOffset CreatedDateTime { get; }
+ /// <summary> A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. </summary>
+ public DateTimeOffset LastActionDateTime { get; }
+ /// <summary> A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. </summary>
+ public DateTimeOffset LastSuccessfulTrainingDateTime { get; }
+ /// <summary> Show failure message when training failed (omitted when training succeeds). </summary>
+ public string Message { get; }
+ }
+}
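
A short reporting sketch grounded in the properties above (how the result was obtained is out of scope here; see the polling sketch after FaceOperationStatus):

    using System;
    using Azure.AI.Vision.Face;

    static void ReportTraining(FaceTrainingResult training)
    {
        Console.WriteLine($"Status:       {training.Status}");
        Console.WriteLine($"Created:      {training.CreatedDateTime:O}");
        Console.WriteLine($"Last action:  {training.LastActionDateTime:O}");
        Console.WriteLine($"Last success: {training.LastSuccessfulTrainingDateTime:O}");

        if (training.Status == FaceOperationStatus.Failed)
        {
            // Message is omitted unless training failed.
            Console.WriteLine($"Failure:      {training.Message}");
        }
    }
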
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs
new file mode 100644
index 000000000000..dedc938a5798
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs
@@ -0,0 +1,173 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class FindSimilarFromLargeFaceListRequest : IUtf8JsonSerializable, IJsonModel<FindSimilarFromLargeFaceListRequest>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<FindSimilarFromLargeFaceListRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<FindSimilarFromLargeFaceListRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("faceId"u8);
+ writer.WriteStringValue(FaceId);
+ if (Optional.IsDefined(MaxNumOfCandidatesReturned))
+ {
+ writer.WritePropertyName("maxNumOfCandidatesReturned"u8);
+ writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value);
+ }
+ if (Optional.IsDefined(Mode))
+ {
+ writer.WritePropertyName("mode"u8);
+ writer.WriteStringValue(Mode.Value.ToString());
+ }
+ writer.WritePropertyName("largeFaceListId"u8);
+ writer.WriteStringValue(LargeFaceListId);
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ FindSimilarFromLargeFaceListRequest IJsonModel<FindSimilarFromLargeFaceListRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options);
+ }
+
+ internal static FindSimilarFromLargeFaceListRequest DeserializeFindSimilarFromLargeFaceListRequest(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ Guid faceId = default;
+ int? maxNumOfCandidatesReturned = default;
+ FindSimilarMatchMode? mode = default;
+ string largeFaceListId = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("faceId"u8))
+ {
+ faceId = property.Value.GetGuid();
+ continue;
+ }
+ if (property.NameEquals("maxNumOfCandidatesReturned"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ maxNumOfCandidatesReturned = property.Value.GetInt32();
+ continue;
+ }
+ if (property.NameEquals("mode"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ mode = new FindSimilarMatchMode(property.Value.GetString());
+ continue;
+ }
+ if (property.NameEquals("largeFaceListId"u8))
+ {
+ largeFaceListId = property.Value.GetString();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<FindSimilarFromLargeFaceListRequest>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ FindSimilarFromLargeFaceListRequest IPersistableModel<FindSimilarFromLargeFaceListRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<FindSimilarFromLargeFaceListRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static FindSimilarFromLargeFaceListRequest FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
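Both serialization files in this change follow the System.ClientModel IJsonModel<T>/IPersistableModel<T> pattern: a format of "W" means "ask the model for its wire format", which these models report as "J" (JSON) via GetFormatFromOptions, and any other requested format throws a FormatException. Because the generated types plug into that pattern, they round-trip through ModelReaderWriter. A minimal sketch — SomeModel/someModel stand in for any accessible IPersistableModel<T> implementation, since the request types here are internal:

using System;
using System.ClientModel.Primitives;

// Round-trip an IPersistableModel<T> through ModelReaderWriter.
// "J" explicitly requests the JSON format the generated models advertise.
ModelReaderWriterOptions options = new ModelReaderWriterOptions("J");
BinaryData payload = ModelReaderWriter.Write(someModel, options);          // serialize
SomeModel restored = ModelReaderWriter.Read<SomeModel>(payload, options);  // deserialize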
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs
new file mode 100644
index 000000000000..8603dc4b91cf
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs
@@ -0,0 +1,89 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+ /// The FindSimilarFromLargeFaceListRequest.
+ internal partial class FindSimilarFromLargeFaceListRequest
+ {
+ ///
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ ///
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of .
+ /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.
+ /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire.
+ /// Thrown when largeFaceListId is null.
+ internal FindSimilarFromLargeFaceListRequest(Guid faceId, string largeFaceListId)
+ {
+ Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId));
+
+ FaceId = faceId;
+ LargeFaceListId = largeFaceListId;
+ }
+
+ /// Initializes a new instance of .
+ /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.
+ /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.
+ /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
+ /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire.
+ /// Keeps track of any properties unknown to the library.
+ internal FindSimilarFromLargeFaceListRequest(Guid faceId, int? maxNumOfCandidatesReturned, FindSimilarMatchMode? mode, string largeFaceListId, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ FaceId = faceId;
+ MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned;
+ Mode = mode;
+ LargeFaceListId = largeFaceListId;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of for deserialization.
+ internal FindSimilarFromLargeFaceListRequest()
+ {
+ }
+
+ /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.
+ public Guid FaceId { get; }
+ /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.
+ public int? MaxNumOfCandidatesReturned { get; }
+ /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
+ public FindSimilarMatchMode? Mode { get; }
+ /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire.
+ public string LargeFaceListId { get; }
+ }
+}
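For reference, the Write implementation above produces a flat JSON object: faceId and largeFaceListId are always emitted, while maxNumOfCandidatesReturned and mode appear only when set. An illustrative payload with every optional property set — the GUID and list id are invented:

// Illustrative wire body for FindSimilarFromLargeFaceListRequest;
// all values below are made up for the example.
string examplePayload = """
    {
      "faceId": "c5c24a82-6845-4031-9d5d-978df9175426",
      "maxNumOfCandidatesReturned": 20,
      "mode": "matchPerson",
      "largeFaceListId": "my-large-face-list"
    }
    """;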
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs
new file mode 100644
index 000000000000..44d5b2573408
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs
@@ -0,0 +1,183 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+ internal partial class IdentifyFromLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel<IdentifyFromLargePersonGroupRequest>
+ {
+ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<IdentifyFromLargePersonGroupRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+ void IJsonModel<IdentifyFromLargePersonGroupRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<IdentifyFromLargePersonGroupRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{format}' format.");
+ }
+
+ writer.WriteStartObject();
+ writer.WritePropertyName("faceIds"u8);
+ writer.WriteStartArray();
+ foreach (var item in FaceIds)
+ {
+ writer.WriteStringValue(item);
+ }
+ writer.WriteEndArray();
+ writer.WritePropertyName("largePersonGroupId"u8);
+ writer.WriteStringValue(LargePersonGroupId);
+ if (Optional.IsDefined(MaxNumOfCandidatesReturned))
+ {
+ writer.WritePropertyName("maxNumOfCandidatesReturned"u8);
+ writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value);
+ }
+ if (Optional.IsDefined(ConfidenceThreshold))
+ {
+ writer.WritePropertyName("confidenceThreshold"u8);
+ writer.WriteNumberValue(ConfidenceThreshold.Value);
+ }
+ if (options.Format != "W" && _serializedAdditionalRawData != null)
+ {
+ foreach (var item in _serializedAdditionalRawData)
+ {
+ writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+ writer.WriteRawValue(item.Value);
+#else
+ using (JsonDocument document = JsonDocument.Parse(item.Value))
+ {
+ JsonSerializer.Serialize(writer, document.RootElement);
+ }
+#endif
+ }
+ }
+ writer.WriteEndObject();
+ }
+
+ IdentifyFromLargePersonGroupRequest IJsonModel<IdentifyFromLargePersonGroupRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<IdentifyFromLargePersonGroupRequest>)this).GetFormatFromOptions(options) : options.Format;
+ if (format != "J")
+ {
+ throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{format}' format.");
+ }
+
+ using JsonDocument document = JsonDocument.ParseValue(ref reader);
+ return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options);
+ }
+
+ internal static IdentifyFromLargePersonGroupRequest DeserializeIdentifyFromLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null)
+ {
+ options ??= ModelSerializationExtensions.WireOptions;
+
+ if (element.ValueKind == JsonValueKind.Null)
+ {
+ return null;
+ }
+ IReadOnlyList<Guid> faceIds = default;
+ string largePersonGroupId = default;
+ int? maxNumOfCandidatesReturned = default;
+ float? confidenceThreshold = default;
+ IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+ Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+ foreach (var property in element.EnumerateObject())
+ {
+ if (property.NameEquals("faceIds"u8))
+ {
+ List<Guid> array = new List<Guid>();
+ foreach (var item in property.Value.EnumerateArray())
+ {
+ array.Add(item.GetGuid());
+ }
+ faceIds = array;
+ continue;
+ }
+ if (property.NameEquals("largePersonGroupId"u8))
+ {
+ largePersonGroupId = property.Value.GetString();
+ continue;
+ }
+ if (property.NameEquals("maxNumOfCandidatesReturned"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ maxNumOfCandidatesReturned = property.Value.GetInt32();
+ continue;
+ }
+ if (property.NameEquals("confidenceThreshold"u8))
+ {
+ if (property.Value.ValueKind == JsonValueKind.Null)
+ {
+ continue;
+ }
+ confidenceThreshold = property.Value.GetSingle();
+ continue;
+ }
+ if (options.Format != "W")
+ {
+ rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+ }
+ }
+ serializedAdditionalRawData = rawDataDictionary;
+ return new IdentifyFromLargePersonGroupRequest(faceIds, largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, serializedAdditionalRawData);
+ }
+
+ BinaryData IPersistableModel<IdentifyFromLargePersonGroupRequest>.Write(ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<IdentifyFromLargePersonGroupRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ return ModelReaderWriter.Write(this, options);
+ default:
+ throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{options.Format}' format.");
+ }
+ }
+
+ IdentifyFromLargePersonGroupRequest IPersistableModel<IdentifyFromLargePersonGroupRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+ {
+ var format = options.Format == "W" ? ((IPersistableModel<IdentifyFromLargePersonGroupRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+ switch (format)
+ {
+ case "J":
+ {
+ using JsonDocument document = JsonDocument.Parse(data);
+ return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options);
+ }
+ default:
+ throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{options.Format}' format.");
+ }
+ }
+
+ string IPersistableModel<IdentifyFromLargePersonGroupRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+ /// Deserializes the model from a raw response.
+ /// The response to deserialize the model from.
+ internal static IdentifyFromLargePersonGroupRequest FromResponse(Response response)
+ {
+ using var document = JsonDocument.Parse(response.Content);
+ return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement);
+ }
+
+ /// Convert into a RequestContent.
+ internal virtual RequestContent ToRequestContent()
+ {
+ var content = new Utf8JsonRequestContent();
+ content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+ return content;
+ }
+ }
+}
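One detail shared by both Write implementations: unknown properties captured in _serializedAdditionalRawData are replayed into the output. On .NET 6 and later, Utf8JsonWriter.WriteRawValue copies the stored JSON bytes directly; earlier targets lack that API, so the generated code re-parses the value and lets JsonSerializer re-emit it. The fallback in isolation — a sketch, not the generated code verbatim:

using System;
using System.Text.Json;

// Pre-.NET 6 fallback: Utf8JsonWriter has no WriteRawValue, so parse the
// stored raw JSON and re-serialize the root element under the current writer.
static void WriteRawCompat(Utf8JsonWriter writer, BinaryData value)
{
    using JsonDocument document = JsonDocument.Parse(value);
    JsonSerializer.Serialize(writer, document.RootElement);
}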
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs
new file mode 100644
index 000000000000..f8ae4acdf23c
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs
@@ -0,0 +1,91 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Azure.AI.Vision.Face
+{
+ /// The IdentifyFromLargePersonGroupRequest.
+ internal partial class IdentifyFromLargePersonGroupRequest
+ {
+ ///
+ /// Keeps track of any properties unknown to the library.
+ ///
+ /// To assign an object to the value of this property use BinaryData.FromObjectAsJson.
+ /// To assign an already formatted json string to this property use BinaryData.FromString.
+ ///
+ /// Examples:
+ /// - BinaryData.FromObjectAsJson("foo"): creates a payload of "foo".
+ /// - BinaryData.FromString("\"foo\""): creates a payload of "foo".
+ /// - BinaryData.FromObjectAsJson(new { key = "value" }): creates a payload of { "key": "value" }.
+ /// - BinaryData.FromString("{\"key\": \"value\"}"): creates a payload of { "key": "value" }.
+ ///
+ private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+ /// Initializes a new instance of .
+ /// Array of query faceIds, created by "Detect". Each face is identified independently. The valid number of faceIds is between [1, 10].
+ /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time.
+ /// Thrown when faceIds or largePersonGroupId is null.
+ internal IdentifyFromLargePersonGroupRequest(IEnumerable<Guid> faceIds, string largePersonGroupId)
+ {
+ Argument.AssertNotNull(faceIds, nameof(faceIds));
+ Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId));
+
+ FaceIds = faceIds.ToList();
+ LargePersonGroupId = largePersonGroupId;
+ }
+
+ /// Initializes a new instance of .
+ /// Array of query faceIds, created by "Detect". Each face is identified independently. The valid number of faceIds is between [1, 10].
+ /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time.
+ /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.
+ /// Customized identification confidence threshold, in the range of [0, 1]. Advanced users can tweak this value to override the default internal threshold for better precision on their scenario data. Note that there is no guarantee that this threshold will work on other data or after algorithm updates.
+ /// Keeps track of any properties unknown to the library.
+ internal IdentifyFromLargePersonGroupRequest(IReadOnlyList<Guid> faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned, float? confidenceThreshold, IDictionary<string, BinaryData> serializedAdditionalRawData)
+ {
+ FaceIds = faceIds;
+ LargePersonGroupId = largePersonGroupId;
+ MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned;
+ ConfidenceThreshold = confidenceThreshold;
+ _serializedAdditionalRawData = serializedAdditionalRawData;
+ }
+
+ /// Initializes a new instance of