diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.net8.0.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.net8.0.cs index 618789493d12..3e6a644bb79d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.net8.0.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.net8.0.cs @@ -2236,8 +2236,10 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class AzurePostgreSqlLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AzurePostgreSqlLinkedService() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement AzureCloudType { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement CommandTimeout { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.DataFactoryCredentialReference Credential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Database { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Encoding { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } @@ -2245,7 +2247,13 @@ public AzurePostgreSqlLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement Port { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ReadBufferSize { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ServicePrincipalCredentialType { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecret ServicePrincipalEmbeddedCert { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecret ServicePrincipalEmbeddedCertPassword { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ServicePrincipalId { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecret ServicePrincipalKey { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SslMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Tenant { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Timeout { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Timezone { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement TrustServerCertificate { get { throw null; } set { } } @@ -2261,6 +2269,8 @@ public partial class AzurePostgreSqlSink : Azure.ResourceManager.DataFactory.Mod { public AzurePostgreSqlSink() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement PreCopyScript { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> UpsertKeys { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum? 
WriteMethod { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlSink System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -2292,6 +2302,25 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AzurePostgreSqlWriteMethodEnum : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AzurePostgreSqlWriteMethodEnum(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum BulkInsert { get { throw null; } } + public static Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum CopyCommand { get { throw null; } } + public static Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum Upsert { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum left, Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum left, Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum right) { throw null; } + public override string ToString() { throw null; } + } public partial class AzureQueueSink : Azure.ResourceManager.DataFactory.Models.CopySink, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AzureQueueSink() { } @@ -2929,6 +2958,8 @@ public partial class CommonDataServiceForAppsSink : Azure.ResourceManager.DataFa { public CommonDataServiceForAppsSink(Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior writeBehavior) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AlternateKeyName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassBusinessLogicExecution { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassPowerAutomateFlows { get { throw null; } set { } } public 
Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior WriteBehavior { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -4263,6 +4294,7 @@ public partial class DataFactoryScriptActivity : Azure.ResourceManager.DataFacto { public DataFactoryScriptActivity(string name) : base (default(string)) { } public Azure.ResourceManager.DataFactory.Models.ScriptActivityTypeLogSettings LogSettings { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ReturnMultistatementResult { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ScriptBlockExecutionTimeout { get { throw null; } set { } } public System.Collections.Generic.IList Scripts { get { throw null; } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -5209,6 +5241,8 @@ public partial class DynamicsCrmSink : Azure.ResourceManager.DataFactory.Models. { public DynamicsCrmSink(Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior writeBehavior) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AlternateKeyName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassBusinessLogicExecution { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassPowerAutomateFlows { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior WriteBehavior { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -5269,6 +5303,8 @@ public partial class DynamicsSink : Azure.ResourceManager.DataFactory.Models.Cop { public DynamicsSink(Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior writeBehavior) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AlternateKeyName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassBusinessLogicExecution { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassPowerAutomateFlows { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior WriteBehavior { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -6205,12 +6241,37 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + 
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct GreenplumAuthenticationType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public GreenplumAuthenticationType(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType Basic { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType left, Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType left, Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType right) { throw null; } + public override string ToString() { throw null; } + } public partial class GreenplumLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public GreenplumLinkedService() { } + public Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType? 
AuthenticationType { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CommandTimeout { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionTimeout { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Database { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Host { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryKeyVaultSecret Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Port { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SslMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.GreenplumLinkedService System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -8686,6 +8747,23 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct OracleAuthenticationType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public OracleAuthenticationType(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType Basic { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType left, Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType left, Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType right) { throw null; } + public override string ToString() { throw null; } + } public partial class OracleCloudStorageLinkedService : 
Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public OracleCloudStorageLinkedService() { } @@ -8734,10 +8812,24 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class OracleLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public OracleLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement connectionString) { } + public OracleLinkedService() { } + public Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType? AuthenticationType { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CryptoChecksumClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CryptoChecksumTypesClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EnableBulkLoad { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EncryptionClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EncryptionTypesClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement FetchSize { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement FetchTswtzAsTimestamp { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement InitializationString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement InitialLobFetchSize { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryKeyVaultSecret Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement StatementCacheSize { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SupportV1DataTypes { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.OracleLinkedService System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -9384,11 +9476,12 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class PrestoLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public PrestoLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement host, Azure.Core.Expressions.DataFactory.DataFactoryElement 
serverVersion, Azure.Core.Expressions.DataFactory.DataFactoryElement catalog, Azure.ResourceManager.DataFactory.Models.PrestoAuthenticationType authenticationType) { } + public PrestoLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement host, Azure.Core.Expressions.DataFactory.DataFactoryElement catalog, Azure.ResourceManager.DataFactory.Models.PrestoAuthenticationType authenticationType) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AllowHostNameCNMismatch { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement AllowSelfSignedServerCert { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.PrestoAuthenticationType AuthenticationType { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Catalog { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EnableServerCertificateValidation { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement EnableSsl { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Host { get { throw null; } set { } } @@ -10356,7 +10449,7 @@ public SapOdpLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactorySecret Password { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncLibraryPath { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMyName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncPartnerName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncQop { get { throw null; } set { } } @@ -10458,7 +10551,7 @@ public SapTableLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactorySecret Password { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncLibraryPath { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMyName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncPartnerName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncQop { get { throw null; } set { } } @@ -10856,6 +10949,7 @@ public partial class ServiceNowV2ObjectDataset : Azure.ResourceManager.DataFacto { public ServiceNowV2ObjectDataset(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName) : base (default(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference)) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement TableName { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.ValueType? 
ValueType { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.ServiceNowV2ObjectDataset System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -12274,14 +12368,31 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public static bool operator !=(Azure.ResourceManager.DataFactory.Models.TeradataAuthenticationType left, Azure.ResourceManager.DataFactory.Models.TeradataAuthenticationType right) { throw null; } public override string ToString() { throw null; } } + public partial class TeradataImportCommand : Azure.ResourceManager.DataFactory.Models.ImportSettings, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public TeradataImportCommand() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> AdditionalFormatOptions { get { throw null; } set { } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataImportCommand System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataImportCommand System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class TeradataLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public TeradataLinkedService() { } public Azure.ResourceManager.DataFactory.Models.TeradataAuthenticationType? 
AuthenticationType { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CharacterSet { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement HttpsPortNumber { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement MaxRespSize { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactorySecret Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement PortNumber { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SslMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement UseDataEncryption { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.TeradataLinkedService System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -12303,6 +12414,17 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class TeradataSink : Azure.ResourceManager.DataFactory.Models.CopySink, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public TeradataSink() { } + public Azure.ResourceManager.DataFactory.Models.TeradataImportCommand ImportSettings { get { throw null; } set { } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataSink System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataSink System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class TeradataSource : Azure.ResourceManager.DataFactory.Models.TabularSource, System.ClientModel.Primitives.IJsonModel, 
System.ClientModel.Primitives.IPersistableModel { public TeradataSource() { } @@ -12464,6 +12586,24 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ValueType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ValueType(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.ValueType Actual { get { throw null; } } + public static Azure.ResourceManager.DataFactory.Models.ValueType Display { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.ValueType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.ValueType left, Azure.ResourceManager.DataFactory.Models.ValueType right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.ValueType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.ValueType left, Azure.ResourceManager.DataFactory.Models.ValueType right) { throw null; } + public override string ToString() { throw null; } + } public partial class VerticaLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public VerticaLinkedService() { } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs index 618789493d12..3e6a644bb79d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs @@ -2236,8 +2236,10 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public partial class AzurePostgreSqlLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AzurePostgreSqlLinkedService() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement AzureCloudType { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement CommandTimeout { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.DataFactoryCredentialReference Credential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Database { get { throw null; } set { 
} } public Azure.Core.Expressions.DataFactory.DataFactoryElement Encoding { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } @@ -2245,7 +2247,13 @@ public AzurePostgreSqlLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement Port { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ReadBufferSize { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ServicePrincipalCredentialType { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecret ServicePrincipalEmbeddedCert { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecret ServicePrincipalEmbeddedCertPassword { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ServicePrincipalId { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecret ServicePrincipalKey { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SslMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Tenant { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Timeout { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Timezone { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement TrustServerCertificate { get { throw null; } set { } } @@ -2261,6 +2269,8 @@ public partial class AzurePostgreSqlSink : Azure.ResourceManager.DataFactory.Mod { public AzurePostgreSqlSink() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement PreCopyScript { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> UpsertKeys { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum? 
WriteMethod { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlSink System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -2292,6 +2302,25 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AzurePostgreSqlWriteMethodEnum : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AzurePostgreSqlWriteMethodEnum(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum BulkInsert { get { throw null; } } + public static Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum CopyCommand { get { throw null; } } + public static Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum Upsert { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum left, Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum left, Azure.ResourceManager.DataFactory.Models.AzurePostgreSqlWriteMethodEnum right) { throw null; } + public override string ToString() { throw null; } + } public partial class AzureQueueSink : Azure.ResourceManager.DataFactory.Models.CopySink, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public AzureQueueSink() { } @@ -2929,6 +2958,8 @@ public partial class CommonDataServiceForAppsSink : Azure.ResourceManager.DataFa { public CommonDataServiceForAppsSink(Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior writeBehavior) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AlternateKeyName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassBusinessLogicExecution { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassPowerAutomateFlows { get { throw null; } set { } } public 
Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior WriteBehavior { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -4263,6 +4294,7 @@ public partial class DataFactoryScriptActivity : Azure.ResourceManager.DataFacto { public DataFactoryScriptActivity(string name) : base (default(string)) { } public Azure.ResourceManager.DataFactory.Models.ScriptActivityTypeLogSettings LogSettings { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ReturnMultistatementResult { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ScriptBlockExecutionTimeout { get { throw null; } set { } } public System.Collections.Generic.IList Scripts { get { throw null; } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -5209,6 +5241,8 @@ public partial class DynamicsCrmSink : Azure.ResourceManager.DataFactory.Models. { public DynamicsCrmSink(Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior writeBehavior) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AlternateKeyName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassBusinessLogicExecution { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassPowerAutomateFlows { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior WriteBehavior { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -5269,6 +5303,8 @@ public partial class DynamicsSink : Azure.ResourceManager.DataFactory.Models.Cop { public DynamicsSink(Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior writeBehavior) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AlternateKeyName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassBusinessLogicExecution { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement BypassPowerAutomateFlows { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.DynamicsSinkWriteBehavior WriteBehavior { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -6205,12 +6241,37 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + 
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct GreenplumAuthenticationType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public GreenplumAuthenticationType(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType Basic { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType left, Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType left, Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType right) { throw null; } + public override string ToString() { throw null; } + } public partial class GreenplumLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public GreenplumLinkedService() { } + public Azure.ResourceManager.DataFactory.Models.GreenplumAuthenticationType? 
AuthenticationType { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CommandTimeout { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionTimeout { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Database { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Host { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryKeyVaultSecret Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Port { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SslMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.GreenplumLinkedService System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -8686,6 +8747,23 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct OracleAuthenticationType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public OracleAuthenticationType(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType Basic { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType left, Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType left, Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType right) { throw null; } + public override string ToString() { throw null; } + } public partial class OracleCloudStorageLinkedService : 
Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public OracleCloudStorageLinkedService() { } @@ -8734,10 +8812,24 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class OracleLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public OracleLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement connectionString) { } + public OracleLinkedService() { } + public Azure.ResourceManager.DataFactory.Models.OracleAuthenticationType? AuthenticationType { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CryptoChecksumClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CryptoChecksumTypesClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EnableBulkLoad { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EncryptionClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EncryptionTypesClient { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement FetchSize { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement FetchTswtzAsTimestamp { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement InitializationString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement InitialLobFetchSize { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryKeyVaultSecret Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement StatementCacheSize { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SupportV1DataTypes { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.OracleLinkedService System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -9384,11 +9476,12 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write } public partial class PrestoLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { - public PrestoLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement host, Azure.Core.Expressions.DataFactory.DataFactoryElement 
serverVersion, Azure.Core.Expressions.DataFactory.DataFactoryElement catalog, Azure.ResourceManager.DataFactory.Models.PrestoAuthenticationType authenticationType) { } + public PrestoLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement host, Azure.Core.Expressions.DataFactory.DataFactoryElement catalog, Azure.ResourceManager.DataFactory.Models.PrestoAuthenticationType authenticationType) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement AllowHostNameCNMismatch { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement AllowSelfSignedServerCert { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.PrestoAuthenticationType AuthenticationType { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Catalog { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EnableServerCertificateValidation { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement EnableSsl { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Host { get { throw null; } set { } } @@ -10356,7 +10449,7 @@ public SapOdpLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactorySecret Password { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncLibraryPath { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMyName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncPartnerName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncQop { get { throw null; } set { } } @@ -10458,7 +10551,7 @@ public SapTableLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactorySecret Password { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncLibraryPath { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMode { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncMyName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncPartnerName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SncQop { get { throw null; } set { } } @@ -10856,6 +10949,7 @@ public partial class ServiceNowV2ObjectDataset : Azure.ResourceManager.DataFacto { public ServiceNowV2ObjectDataset(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName) : base (default(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference)) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement TableName { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.ValueType? 
ValueType { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.ServiceNowV2ObjectDataset System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -12274,14 +12368,31 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write public static bool operator !=(Azure.ResourceManager.DataFactory.Models.TeradataAuthenticationType left, Azure.ResourceManager.DataFactory.Models.TeradataAuthenticationType right) { throw null; } public override string ToString() { throw null; } } + public partial class TeradataImportCommand : Azure.ResourceManager.DataFactory.Models.ImportSettings, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public TeradataImportCommand() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> AdditionalFormatOptions { get { throw null; } set { } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataImportCommand System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataImportCommand System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class TeradataLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public TeradataLinkedService() { } public Azure.ResourceManager.DataFactory.Models.TeradataAuthenticationType? 
AuthenticationType { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement CharacterSet { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement HttpsPortNumber { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement MaxRespSize { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactorySecret Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement PortNumber { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SslMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement UseDataEncryption { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.ResourceManager.DataFactory.Models.TeradataLinkedService System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -12303,6 +12414,17 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class TeradataSink : Azure.ResourceManager.DataFactory.Models.CopySink, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public TeradataSink() { } + public Azure.ResourceManager.DataFactory.Models.TeradataImportCommand ImportSettings { get { throw null; } set { } } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataSink System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.ResourceManager.DataFactory.Models.TeradataSink System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class TeradataSource : Azure.ResourceManager.DataFactory.Models.TabularSource, System.ClientModel.Primitives.IJsonModel, 
System.ClientModel.Primitives.IPersistableModel { public TeradataSource() { } @@ -12464,6 +12586,24 @@ protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter write string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ValueType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ValueType(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.ValueType Actual { get { throw null; } } + public static Azure.ResourceManager.DataFactory.Models.ValueType Display { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.ValueType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.ValueType left, Azure.ResourceManager.DataFactory.Models.ValueType right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.ValueType (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.ValueType left, Azure.ResourceManager.DataFactory.Models.ValueType right) { throw null; } + public override string ToString() { throw null; } + } public partial class VerticaLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { public VerticaLinkedService() { } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.Serialization.cs index fddcf4ab3cbd..878b66d9d90b 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.Serialization.cs @@ -108,6 +108,46 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("encryptedCredential"u8); writer.WriteStringValue(EncryptedCredential); } + if (Optional.IsDefined(ServicePrincipalId)) + { + writer.WritePropertyName("servicePrincipalId"u8); + JsonSerializer.Serialize(writer, ServicePrincipalId); + } + if (Optional.IsDefined(ServicePrincipalKey)) + { + writer.WritePropertyName("servicePrincipalKey"u8); + JsonSerializer.Serialize(writer, ServicePrincipalKey); + } + if (Optional.IsDefined(ServicePrincipalCredentialType)) + { + writer.WritePropertyName("servicePrincipalCredentialType"u8); + JsonSerializer.Serialize(writer, ServicePrincipalCredentialType); + } + if (Optional.IsDefined(ServicePrincipalEmbeddedCert)) + { + writer.WritePropertyName("servicePrincipalEmbeddedCert"u8); + 
JsonSerializer.Serialize(writer, ServicePrincipalEmbeddedCert); + } + if (Optional.IsDefined(ServicePrincipalEmbeddedCertPassword)) + { + writer.WritePropertyName("servicePrincipalEmbeddedCertPassword"u8); + JsonSerializer.Serialize(writer, ServicePrincipalEmbeddedCertPassword); + } + if (Optional.IsDefined(Tenant)) + { + writer.WritePropertyName("tenant"u8); + JsonSerializer.Serialize(writer, Tenant); + } + if (Optional.IsDefined(AzureCloudType)) + { + writer.WritePropertyName("azureCloudType"u8); + JsonSerializer.Serialize(writer, AzureCloudType); + } + if (Optional.IsDefined(Credential)) + { + writer.WritePropertyName("credential"u8); + writer.WriteObjectValue(Credential, options); + } writer.WriteEndObject(); foreach (var item in AdditionalProperties) { @@ -163,6 +203,14 @@ internal static AzurePostgreSqlLinkedService DeserializeAzurePostgreSqlLinkedSer DataFactoryElement encoding = default; DataFactoryKeyVaultSecret password = default; string encryptedCredential = default; + DataFactoryElement servicePrincipalId = default; + DataFactorySecret servicePrincipalKey = default; + DataFactoryElement servicePrincipalCredentialType = default; + DataFactorySecret servicePrincipalEmbeddedCert = default; + DataFactorySecret servicePrincipalEmbeddedCertPassword = default; + DataFactoryElement tenant = default; + DataFactoryElement azureCloudType = default; + DataFactoryCredentialReference credential = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -357,6 +405,78 @@ internal static AzurePostgreSqlLinkedService DeserializeAzurePostgreSqlLinkedSer encryptedCredential = property0.Value.GetString(); continue; } + if (property0.NameEquals("servicePrincipalId"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + servicePrincipalId = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("servicePrincipalKey"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + servicePrincipalKey = JsonSerializer.Deserialize(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("servicePrincipalCredentialType"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + servicePrincipalCredentialType = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("servicePrincipalEmbeddedCert"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + servicePrincipalEmbeddedCert = JsonSerializer.Deserialize(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("servicePrincipalEmbeddedCertPassword"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + servicePrincipalEmbeddedCertPassword = JsonSerializer.Deserialize(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("tenant"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + tenant = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("azureCloudType"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + azureCloudType = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("credential"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + credential = 
DataFactoryCredentialReference.DeserializeDataFactoryCredentialReference(property0.Value, options); + continue; + } } continue; } @@ -384,7 +504,15 @@ internal static AzurePostgreSqlLinkedService DeserializeAzurePostgreSqlLinkedSer timezone, encoding, password, - encryptedCredential); + encryptedCredential, + servicePrincipalId, + servicePrincipalKey, + servicePrincipalCredentialType, + servicePrincipalEmbeddedCert, + servicePrincipalEmbeddedCertPassword, + tenant, + azureCloudType, + credential); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.cs index 721b309f7d36..e8cfc8281487 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlLinkedService.cs @@ -42,7 +42,15 @@ public AzurePostgreSqlLinkedService() /// Gets or sets the .NET encoding that will be used to encode/decode PostgreSQL string data. Type: string. /// The Azure key vault secret reference of password in connection string. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal AzurePostgreSqlLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryElement server, DataFactoryElement port, DataFactoryElement username, DataFactoryElement database, DataFactoryElement sslMode, DataFactoryElement timeout, DataFactoryElement commandTimeout, DataFactoryElement trustServerCertificate, DataFactoryElement readBufferSize, DataFactoryElement timezone, DataFactoryElement encoding, DataFactoryKeyVaultSecret password, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) + /// The ID of the service principal used to authenticate against Azure Database for PostgreSQL Flexible server. Type: string (or Expression with resultType string). + /// The key of the service principal used to authenticate against Azure Database for PostgreSQL Flexible server. + /// The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + /// Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). + /// Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). + /// The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + /// Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). + /// The credential reference containing authentication information. 
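// Illustrative usage sketch (not part of the generated diff): populating the new
// service-principal authentication properties added to AzurePostgreSqlLinkedService.
// All values are placeholders, the literal assignments assume the implicit conversions
// exposed by DataFactoryElement<T>, and the DataFactoryCredentialReference property is
// omitted for brevity.
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var linkedService = new AzurePostgreSqlLinkedService
{
    Server = "example.postgres.database.azure.com",               // placeholder server
    Database = "exampledb",                                        // placeholder database
    ServicePrincipalId = "00000000-0000-0000-0000-000000000000",   // placeholder app (client) id
    ServicePrincipalCredentialType = "ServicePrincipalKey",        // or "ServicePrincipalCert"
    ServicePrincipalKey = new DataFactorySecretString("<application-secret>"),
    Tenant = "contoso.onmicrosoft.com",                            // placeholder tenant
    AzureCloudType = "AzurePublic"
};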
+ internal AzurePostgreSqlLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryElement server, DataFactoryElement port, DataFactoryElement username, DataFactoryElement database, DataFactoryElement sslMode, DataFactoryElement timeout, DataFactoryElement commandTimeout, DataFactoryElement trustServerCertificate, DataFactoryElement readBufferSize, DataFactoryElement timezone, DataFactoryElement encoding, DataFactoryKeyVaultSecret password, string encryptedCredential, DataFactoryElement servicePrincipalId, DataFactorySecret servicePrincipalKey, DataFactoryElement servicePrincipalCredentialType, DataFactorySecret servicePrincipalEmbeddedCert, DataFactorySecret servicePrincipalEmbeddedCertPassword, DataFactoryElement tenant, DataFactoryElement azureCloudType, DataFactoryCredentialReference credential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) { ConnectionString = connectionString; Server = server; @@ -58,6 +66,14 @@ internal AzurePostgreSqlLinkedService(string linkedServiceType, string linkedSer Encoding = encoding; Password = password; EncryptedCredential = encryptedCredential; + ServicePrincipalId = servicePrincipalId; + ServicePrincipalKey = servicePrincipalKey; + ServicePrincipalCredentialType = servicePrincipalCredentialType; + ServicePrincipalEmbeddedCert = servicePrincipalEmbeddedCert; + ServicePrincipalEmbeddedCertPassword = servicePrincipalEmbeddedCertPassword; + Tenant = tenant; + AzureCloudType = azureCloudType; + Credential = credential; LinkedServiceType = linkedServiceType ?? "AzurePostgreSql"; } @@ -89,5 +105,21 @@ internal AzurePostgreSqlLinkedService(string linkedServiceType, string linkedSer public DataFactoryKeyVaultSecret Password { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. public string EncryptedCredential { get; set; } + /// The ID of the service principal used to authenticate against Azure Database for PostgreSQL Flexible server. Type: string (or Expression with resultType string). + public DataFactoryElement ServicePrincipalId { get; set; } + /// The key of the service principal used to authenticate against Azure Database for PostgreSQL Flexible server. + public DataFactorySecret ServicePrincipalKey { get; set; } + /// The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + public DataFactoryElement ServicePrincipalCredentialType { get; set; } + /// Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). + public DataFactorySecret ServicePrincipalEmbeddedCert { get; set; } + /// Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). + public DataFactorySecret ServicePrincipalEmbeddedCertPassword { get; set; } + /// The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
+ public DataFactoryElement Tenant { get; set; } + /// Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). + public DataFactoryElement AzureCloudType { get; set; } + /// The credential reference containing authentication information. + public DataFactoryCredentialReference Credential { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.Serialization.cs index cf14ff166c29..f1b010897bf6 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.Serialization.cs @@ -41,6 +41,16 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("preCopyScript"u8); JsonSerializer.Serialize(writer, PreCopyScript); } + if (Optional.IsDefined(WriteMethod)) + { + writer.WritePropertyName("writeMethod"u8); + writer.WriteStringValue(WriteMethod.Value.ToString()); + } + if (Optional.IsDefined(UpsertSettings)) + { + writer.WritePropertyName("upsertSettings"u8); + writer.WriteObjectValue(UpsertSettings, options); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -76,6 +86,8 @@ internal static AzurePostgreSqlSink DeserializeAzurePostgreSqlSink(JsonElement e return null; } DataFactoryElement preCopyScript = default; + AzurePostgreSqlWriteMethodEnum? writeMethod = default; + AzurePostgreSqlSinkUpsertSettings upsertSettings = default; string type = default; DataFactoryElement writeBatchSize = default; DataFactoryElement writeBatchTimeout = default; @@ -96,6 +108,24 @@ internal static AzurePostgreSqlSink DeserializeAzurePostgreSqlSink(JsonElement e preCopyScript = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("writeMethod"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + writeMethod = new AzurePostgreSqlWriteMethodEnum(property.Value.GetString()); + continue; + } + if (property.NameEquals("upsertSettings"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + upsertSettings = AzurePostgreSqlSinkUpsertSettings.DeserializeAzurePostgreSqlSinkUpsertSettings(property.Value, options); + continue; + } if (property.NameEquals("type"u8)) { type = property.Value.GetString(); @@ -167,7 +197,9 @@ internal static AzurePostgreSqlSink DeserializeAzurePostgreSqlSink(JsonElement e maxConcurrentConnections, disableMetricsCollection, additionalProperties, - preCopyScript); + preCopyScript, + writeMethod, + upsertSettings); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.cs index 279836497c32..d8577cd7ec6d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSink.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// A copy 
activity Azure PostgreSQL sink. + /// A copy activity Azure Database for PostgreSQL sink. public partial class AzurePostgreSqlSink : CopySink { /// Initializes a new instance of . @@ -30,13 +30,32 @@ public AzurePostgreSqlSink() /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// Additional Properties. /// A query to execute before starting the copy. Type: string (or Expression with resultType string). - internal AzurePostgreSqlSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement preCopyScript) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + /// The write behavior for the operation. Default is Bulk Insert. + /// Azure Database for PostgreSQL upsert option settings. + internal AzurePostgreSqlSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement preCopyScript, AzurePostgreSqlWriteMethodEnum? writeMethod, AzurePostgreSqlSinkUpsertSettings upsertSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { PreCopyScript = preCopyScript; + WriteMethod = writeMethod; + UpsertSettings = upsertSettings; CopySinkType = copySinkType ?? "AzurePostgreSqlSink"; } /// A query to execute before starting the copy. Type: string (or Expression with resultType string). public DataFactoryElement PreCopyScript { get; set; } + /// The write behavior for the operation. Default is Bulk Insert. + public AzurePostgreSqlWriteMethodEnum? WriteMethod { get; set; } + /// Azure Database for PostgreSQL upsert option settings. + internal AzurePostgreSqlSinkUpsertSettings UpsertSettings { get; set; } + /// Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). + public DataFactoryElement> UpsertKeys + { + get => UpsertSettings is null ? default : UpsertSettings.Keys; + set + { + if (UpsertSettings is null) + UpsertSettings = new AzurePostgreSqlSinkUpsertSettings(); + UpsertSettings.Keys = value; + } + } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSinkUpsertSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSinkUpsertSettings.Serialization.cs new file mode 100644 index 000000000000..20335eece339 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSinkUpsertSettings.Serialization.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
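// Illustrative usage sketch (not part of the generated diff): selecting the new Upsert write
// behavior on AzurePostgreSqlSink and supplying key columns through UpsertKeys, which forwards
// to the internal AzurePostgreSqlSinkUpsertSettings model introduced in this change. The column
// and activity names are placeholders, assigning a List<string> to UpsertKeys assumes the
// implicit literal conversion exposed by DataFactoryElement<T>, and a parameterless
// AzurePostgreSqlSource constructor is assumed for the copy activity.
using System.Collections.Generic;
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var sink = new AzurePostgreSqlSink
{
    WriteMethod = AzurePostgreSqlWriteMethodEnum.Upsert,
    UpsertKeys = new List<string> { "order_id" }   // placeholder key column(s)
};

// The sink is then wired into a copy activity together with a compatible source.
var copyActivity = new CopyActivity("CopyToAzurePostgreSql", new AzurePostgreSqlSource(), sink);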
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + internal partial class AzurePostgreSqlSinkUpsertSettings : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzurePostgreSqlSinkUpsertSettings)} does not support writing '{format}' format."); + } + + if (Optional.IsDefined(Keys)) + { + writer.WritePropertyName("keys"u8); + JsonSerializer.Serialize(writer, Keys); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + AzurePostgreSqlSinkUpsertSettings IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AzurePostgreSqlSinkUpsertSettings)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAzurePostgreSqlSinkUpsertSettings(document.RootElement, options); + } + + internal static AzurePostgreSqlSinkUpsertSettings DeserializeAzurePostgreSqlSinkUpsertSettings(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DataFactoryElement> keys = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("keys"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + keys = JsonSerializer.Deserialize>>(property.Value.GetRawText()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AzurePostgreSqlSinkUpsertSettings(keys, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AzurePostgreSqlSinkUpsertSettings)} does not support writing '{options.Format}' format."); + } + } + + AzurePostgreSqlSinkUpsertSettings IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeAzurePostgreSqlSinkUpsertSettings(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AzurePostgreSqlSinkUpsertSettings)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSinkUpsertSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSinkUpsertSettings.cs new file mode 100644 index 000000000000..74a9be515d67 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSinkUpsertSettings.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// Azure Database for PostgreSQL upsert option settings. + internal partial class AzurePostgreSqlSinkUpsertSettings + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public AzurePostgreSqlSinkUpsertSettings() + { + } + + /// Initializes a new instance of . + /// Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). + /// Keeps track of any properties unknown to the library. + internal AzurePostgreSqlSinkUpsertSettings(DataFactoryElement> keys, IDictionary serializedAdditionalRawData) + { + Keys = keys; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Key column names for unique row identification. Type: array of strings (or Expression with resultType array of strings). 
+ public DataFactoryElement> Keys { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSource.cs index 5ab3cdaec3c0..7a548fa0cc81 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlSource.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// A copy activity Azure PostgreSQL source. + /// A copy activity Azure Database for PostgreSQL source. public partial class AzurePostgreSqlSource : TabularSource { /// Initializes a new instance of . diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlWriteMethodEnum.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlWriteMethodEnum.cs new file mode 100644 index 000000000000..c78d65376f95 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzurePostgreSqlWriteMethodEnum.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// The write behavior for the operation. Default is Bulk Insert. + public readonly partial struct AzurePostgreSqlWriteMethodEnum : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public AzurePostgreSqlWriteMethodEnum(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string BulkInsertValue = "BulkInsert"; + private const string CopyCommandValue = "CopyCommand"; + private const string UpsertValue = "Upsert"; + + /// BulkInsert. + public static AzurePostgreSqlWriteMethodEnum BulkInsert { get; } = new AzurePostgreSqlWriteMethodEnum(BulkInsertValue); + /// CopyCommand. + public static AzurePostgreSqlWriteMethodEnum CopyCommand { get; } = new AzurePostgreSqlWriteMethodEnum(CopyCommandValue); + /// Upsert. + public static AzurePostgreSqlWriteMethodEnum Upsert { get; } = new AzurePostgreSqlWriteMethodEnum(UpsertValue); + /// Determines if two values are the same. + public static bool operator ==(AzurePostgreSqlWriteMethodEnum left, AzurePostgreSqlWriteMethodEnum right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(AzurePostgreSqlWriteMethodEnum left, AzurePostgreSqlWriteMethodEnum right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator AzurePostgreSqlWriteMethodEnum(string value) => new AzurePostgreSqlWriteMethodEnum(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AzurePostgreSqlWriteMethodEnum other && Equals(other); + /// + public bool Equals(AzurePostgreSqlWriteMethodEnum other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.Serialization.cs index 11d8f33f5995..e747b1aab110 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.Serialization.cs @@ -48,6 +48,16 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("alternateKeyName"u8); JsonSerializer.Serialize(writer, AlternateKeyName); } + if (Optional.IsDefined(BypassBusinessLogicExecution)) + { + writer.WritePropertyName("bypassBusinessLogicExecution"u8); + JsonSerializer.Serialize(writer, BypassBusinessLogicExecution); + } + if (Optional.IsDefined(BypassPowerAutomateFlows)) + { + writer.WritePropertyName("bypassPowerAutomateFlows"u8); + JsonSerializer.Serialize(writer, BypassPowerAutomateFlows); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -85,6 +95,8 @@ internal static CommonDataServiceForAppsSink DeserializeCommonDataServiceForApps DynamicsSinkWriteBehavior writeBehavior = default; DataFactoryElement ignoreNullValues = default; DataFactoryElement alternateKeyName = default; + DataFactoryElement bypassBusinessLogicExecution = default; + DataFactoryElement bypassPowerAutomateFlows = default; string type = default; DataFactoryElement writeBatchSize = default; DataFactoryElement writeBatchTimeout = default; @@ -119,6 +131,24 @@ internal static CommonDataServiceForAppsSink DeserializeCommonDataServiceForApps alternateKeyName = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("bypassBusinessLogicExecution"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + bypassBusinessLogicExecution = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("bypassPowerAutomateFlows"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + bypassPowerAutomateFlows = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } if (property.NameEquals("type"u8)) { type = property.Value.GetString(); @@ -192,7 +222,9 @@ internal static CommonDataServiceForAppsSink DeserializeCommonDataServiceForApps additionalProperties, writeBehavior, ignoreNullValues, - alternateKeyName); + alternateKeyName, + bypassBusinessLogicExecution, + bypassPowerAutomateFlows); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.cs index 426ec7e855d9..92ca843f7f8d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CommonDataServiceForAppsSink.cs @@ -34,11 +34,15 @@ public CommonDataServiceForAppsSink(DynamicsSinkWriteBehavior writeBehavior) /// The write behavior for the operation. 
/// The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). /// The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - internal CommonDataServiceForAppsSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DynamicsSinkWriteBehavior writeBehavior, DataFactoryElement ignoreNullValues, DataFactoryElement alternateKeyName) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + /// Controls the bypass of Dataverse custom business logic. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). + /// Controls the bypass of Power Automate flows. Default is false. Type: boolean (or Expression with resultType boolean). + internal CommonDataServiceForAppsSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DynamicsSinkWriteBehavior writeBehavior, DataFactoryElement ignoreNullValues, DataFactoryElement alternateKeyName, DataFactoryElement bypassBusinessLogicExecution, DataFactoryElement bypassPowerAutomateFlows) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { WriteBehavior = writeBehavior; IgnoreNullValues = ignoreNullValues; AlternateKeyName = alternateKeyName; + BypassBusinessLogicExecution = bypassBusinessLogicExecution; + BypassPowerAutomateFlows = bypassPowerAutomateFlows; CopySinkType = copySinkType ?? "CommonDataServiceForAppsSink"; } @@ -53,5 +57,9 @@ internal CommonDataServiceForAppsSink() public DataFactoryElement IgnoreNullValues { get; set; } /// The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). public DataFactoryElement AlternateKeyName { get; set; } + /// Controls the bypass of Dataverse custom business logic. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). + public DataFactoryElement BypassBusinessLogicExecution { get; set; } + /// Controls the bypass of Power Automate flows. Default is false. Type: boolean (or Expression with resultType boolean). + public DataFactoryElement BypassPowerAutomateFlows { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs index e6f523deca06..c044e877d39f 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs @@ -24,7 +24,7 @@ public partial class CopyActivity : ExecutionActivity /// /// Copy activity sink. /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// , or is null. public CopyActivity(string name, CopyActivitySource source, CopySink sink) : base(name) @@ -63,7 +63,7 @@ public CopyActivity(string name, CopyActivitySource source, CopySink sink) : bas /// /// Copy activity sink. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// Copy activity translator. If not specified, tabular translator is used. /// Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -118,7 +118,7 @@ internal CopyActivity() /// /// Copy activity sink. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public CopySink Sink { get; set; } /// diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs index 9fc72c8ea689..5caaf6288933 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs @@ -148,6 +148,7 @@ internal static CopySink DeserializeCopySink(JsonElement element, ModelReaderWri case "SqlMISink": return SqlMISink.DeserializeSqlMISink(element, options); case "SqlServerSink": return SqlServerSink.DeserializeSqlServerSink(element, options); case "SqlSink": return SqlSink.DeserializeSqlSink(element, options); + case "TeradataSink": return TeradataSink.DeserializeTeradataSink(element, options); case "WarehouseSink": return WarehouseSink.DeserializeWarehouseSink(element, options); } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs index 5a41ba3f8fae..9b5f1749072e 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs @@ -14,7 +14,7 @@ namespace Azure.ResourceManager.DataFactory.Models /// /// A copy activity sink. /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public abstract partial class CopySink { diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs index cb00726b00a9..e3144d43a173 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs @@ -179,7 +179,7 @@ internal static DataFactoryDatasetProperties DeserializeDataFactoryDatasetProper case "InformixTable": return InformixTableDataset.DeserializeInformixTableDataset(element, options); case "JiraObject": return JiraObjectDataset.DeserializeJiraObjectDataset(element, options); case "Json": return JsonDataset.DeserializeJsonDataset(element, options); - case "LakeHouseTable": return LakeHouseTableDataset.DeserializeLakeHouseTableDataset(element, options); + case "LakehouseTable": return LakeHouseTableDataset.DeserializeLakeHouseTableDataset(element, options); case "MagentoObject": return MagentoObjectDataset.DeserializeMagentoObjectDataset(element, options); case "MariaDBTable": return MariaDBTableDataset.DeserializeMariaDBTableDataset(element, options); case "MarketoObject": return MarketoObjectDataset.DeserializeMarketoObjectDataset(element, options); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs index 74942e2bd32e..863b255f4400 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs @@ -185,7 +185,7 @@ internal static DataFactoryLinkedServiceProperties DeserializeDataFactoryLinkedS case "Impala": return ImpalaLinkedService.DeserializeImpalaLinkedService(element, options); case "Informix": return InformixLinkedService.DeserializeInformixLinkedService(element, options); case "Jira": return JiraLinkedService.DeserializeJiraLinkedService(element, options); - case "LakeHouse": return LakeHouseLinkedService.DeserializeLakeHouseLinkedService(element, options); + case "Lakehouse": return LakeHouseLinkedService.DeserializeLakeHouseLinkedService(element, options); case "Magento": return MagentoLinkedService.DeserializeMagentoLinkedService(element, options); case "MariaDB": return MariaDBLinkedService.DeserializeMariaDBLinkedService(element, options); case "Marketo": return MarketoLinkedService.DeserializeMarketoLinkedService(element, options); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.Serialization.cs 
b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.Serialization.cs index 23092ca2210f..f4675b335fe0 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.Serialization.cs @@ -58,6 +58,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("logSettings"u8); writer.WriteObjectValue(LogSettings, options); } + if (Optional.IsDefined(ReturnMultistatementResult)) + { + writer.WritePropertyName("returnMultistatementResult"u8); + JsonSerializer.Serialize(writer, ReturnMultistatementResult); + } writer.WriteEndObject(); foreach (var item in AdditionalProperties) { @@ -105,6 +110,7 @@ internal static DataFactoryScriptActivity DeserializeDataFactoryScriptActivity(J DataFactoryElement scriptBlockExecutionTimeout = default; IList scripts = default; ScriptActivityTypeLogSettings logSettings = default; + DataFactoryElement returnMultistatementResult = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -229,6 +235,15 @@ internal static DataFactoryScriptActivity DeserializeDataFactoryScriptActivity(J logSettings = ScriptActivityTypeLogSettings.DeserializeScriptActivityTypeLogSettings(property0.Value, options); continue; } + if (property0.NameEquals("returnMultistatementResult"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + returnMultistatementResult = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } } continue; } @@ -248,7 +263,8 @@ internal static DataFactoryScriptActivity DeserializeDataFactoryScriptActivity(J policy, scriptBlockExecutionTimeout, scripts ?? new ChangeTrackingList(), - logSettings); + logSettings, + returnMultistatementResult); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.cs index 3193561d73c2..2742a01c5f63 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryScriptActivity.cs @@ -39,11 +39,13 @@ public DataFactoryScriptActivity(string name) : base(name) /// ScriptBlock execution timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). /// Array of script blocks. Type: array. /// Log settings of script activity. - internal DataFactoryScriptActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, DataFactoryElement scriptBlockExecutionTimeout, IList scripts, ScriptActivityTypeLogSettings logSettings) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) + /// Enable to retrieve result sets from multiple SQL statements and the number of rows affected by the DML statement. 
Supported connector: SnowflakeV2. Type: boolean (or Expression with resultType boolean). + internal DataFactoryScriptActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, DataFactoryElement scriptBlockExecutionTimeout, IList scripts, ScriptActivityTypeLogSettings logSettings, DataFactoryElement returnMultistatementResult) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) { ScriptBlockExecutionTimeout = scriptBlockExecutionTimeout; Scripts = scripts; LogSettings = logSettings; + ReturnMultistatementResult = returnMultistatementResult; ActivityType = activityType ?? "Script"; } @@ -58,5 +60,7 @@ internal DataFactoryScriptActivity() public IList Scripts { get; } /// Log settings of script activity. public ScriptActivityTypeLogSettings LogSettings { get; set; } + /// Enable to retrieve result sets from multiple SQL statements and the number of rows affected by the DML statement. Supported connector: SnowflakeV2. Type: boolean (or Expression with resultType boolean). + public DataFactoryElement ReturnMultistatementResult { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.Serialization.cs index 7eaff7acd9ab..ed8a5f0ca680 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.Serialization.cs @@ -48,6 +48,16 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("alternateKeyName"u8); JsonSerializer.Serialize(writer, AlternateKeyName); } + if (Optional.IsDefined(BypassBusinessLogicExecution)) + { + writer.WritePropertyName("bypassBusinessLogicExecution"u8); + JsonSerializer.Serialize(writer, BypassBusinessLogicExecution); + } + if (Optional.IsDefined(BypassPowerAutomateFlows)) + { + writer.WritePropertyName("bypassPowerAutomateFlows"u8); + JsonSerializer.Serialize(writer, BypassPowerAutomateFlows); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -85,6 +95,8 @@ internal static DynamicsCrmSink DeserializeDynamicsCrmSink(JsonElement element, DynamicsSinkWriteBehavior writeBehavior = default; DataFactoryElement ignoreNullValues = default; DataFactoryElement alternateKeyName = default; + DataFactoryElement bypassBusinessLogicExecution = default; + DataFactoryElement bypassPowerAutomateFlows = default; string type = default; DataFactoryElement writeBatchSize = default; DataFactoryElement writeBatchTimeout = default; @@ -119,6 +131,24 @@ internal static DynamicsCrmSink DeserializeDynamicsCrmSink(JsonElement element, alternateKeyName = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("bypassBusinessLogicExecution"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + bypassBusinessLogicExecution = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("bypassPowerAutomateFlows"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + 
continue; + } + bypassPowerAutomateFlows = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } if (property.NameEquals("type"u8)) { type = property.Value.GetString(); @@ -192,7 +222,9 @@ internal static DynamicsCrmSink DeserializeDynamicsCrmSink(JsonElement element, additionalProperties, writeBehavior, ignoreNullValues, - alternateKeyName); + alternateKeyName, + bypassBusinessLogicExecution, + bypassPowerAutomateFlows); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.cs index 102d7f5ff869..6c2f08fd1b3c 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsCrmSink.cs @@ -34,11 +34,15 @@ public DynamicsCrmSink(DynamicsSinkWriteBehavior writeBehavior) /// The write behavior for the operation. /// The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). /// The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - internal DynamicsCrmSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DynamicsSinkWriteBehavior writeBehavior, DataFactoryElement ignoreNullValues, DataFactoryElement alternateKeyName) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + /// Controls the bypass of Dataverse custom business logic. Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). + /// Controls the bypass of Power Automate flows. Default is false. Type: boolean (or Expression with resultType boolean). + internal DynamicsCrmSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DynamicsSinkWriteBehavior writeBehavior, DataFactoryElement ignoreNullValues, DataFactoryElement alternateKeyName, DataFactoryElement bypassBusinessLogicExecution, DataFactoryElement bypassPowerAutomateFlows) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { WriteBehavior = writeBehavior; IgnoreNullValues = ignoreNullValues; AlternateKeyName = alternateKeyName; + BypassBusinessLogicExecution = bypassBusinessLogicExecution; + BypassPowerAutomateFlows = bypassPowerAutomateFlows; CopySinkType = copySinkType ?? "DynamicsCrmSink"; } @@ -53,5 +57,9 @@ internal DynamicsCrmSink() public DataFactoryElement IgnoreNullValues { get; set; } /// The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). 
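// Illustrative usage sketch (not part of the generated diff): opting a DynamicsCrmSink out of
// Dataverse custom business logic and Power Automate flows via the new bypass properties.
// The bypass value is a placeholder (the service defines the accepted strings), and the literal
// assignments assume the implicit conversions exposed by DataFactoryElement<T>.
using Azure.ResourceManager.DataFactory.Models;

var crmSink = new DynamicsCrmSink(DynamicsSinkWriteBehavior.Upsert)
{
    AlternateKeyName = "accountnumber",           // placeholder alternate key
    BypassBusinessLogicExecution = "CustomSync",  // placeholder bypass scope
    BypassPowerAutomateFlows = true
};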
public DataFactoryElement AlternateKeyName { get; set; } + /// Controls the bypass of Dataverse custom business logic. Type: string (or Expression with resultType string). + public DataFactoryElement BypassBusinessLogicExecution { get; set; } + /// Controls the bypass of Power Automate flows. Default is false. Type: boolean (or Expression with resultType boolean). + public DataFactoryElement BypassPowerAutomateFlows { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.Serialization.cs index a6ae9572ab52..38507a42b891 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.Serialization.cs @@ -48,6 +48,16 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("alternateKeyName"u8); JsonSerializer.Serialize(writer, AlternateKeyName); } + if (Optional.IsDefined(BypassBusinessLogicExecution)) + { + writer.WritePropertyName("bypassBusinessLogicExecution"u8); + JsonSerializer.Serialize(writer, BypassBusinessLogicExecution); + } + if (Optional.IsDefined(BypassPowerAutomateFlows)) + { + writer.WritePropertyName("bypassPowerAutomateFlows"u8); + JsonSerializer.Serialize(writer, BypassPowerAutomateFlows); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -85,6 +95,8 @@ internal static DynamicsSink DeserializeDynamicsSink(JsonElement element, ModelR DynamicsSinkWriteBehavior writeBehavior = default; DataFactoryElement ignoreNullValues = default; DataFactoryElement alternateKeyName = default; + DataFactoryElement bypassBusinessLogicExecution = default; + DataFactoryElement bypassPowerAutomateFlows = default; string type = default; DataFactoryElement writeBatchSize = default; DataFactoryElement writeBatchTimeout = default; @@ -119,6 +131,24 @@ internal static DynamicsSink DeserializeDynamicsSink(JsonElement element, ModelR alternateKeyName = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("bypassBusinessLogicExecution"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + bypassBusinessLogicExecution = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("bypassPowerAutomateFlows"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + bypassPowerAutomateFlows = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } if (property.NameEquals("type"u8)) { type = property.Value.GetString(); @@ -192,7 +222,9 @@ internal static DynamicsSink DeserializeDynamicsSink(JsonElement element, ModelR additionalProperties, writeBehavior, ignoreNullValues, - alternateKeyName); + alternateKeyName, + bypassBusinessLogicExecution, + bypassPowerAutomateFlows); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.cs index 94377b07aa39..4c0da655af5d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.cs +++ 
b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DynamicsSink.cs @@ -34,11 +34,15 @@ public DynamicsSink(DynamicsSinkWriteBehavior writeBehavior) /// The write behavior for the operation. /// The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). /// The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). - internal DynamicsSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DynamicsSinkWriteBehavior writeBehavior, DataFactoryElement ignoreNullValues, DataFactoryElement alternateKeyName) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + /// Controls the bypass of Dataverse custom business logic. Type: string (or Expression with resultType string). + /// Controls the bypass of Power Automate flows. Default is false. Type: boolean (or Expression with resultType boolean). + internal DynamicsSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DynamicsSinkWriteBehavior writeBehavior, DataFactoryElement ignoreNullValues, DataFactoryElement alternateKeyName, DataFactoryElement bypassBusinessLogicExecution, DataFactoryElement bypassPowerAutomateFlows) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { WriteBehavior = writeBehavior; IgnoreNullValues = ignoreNullValues; AlternateKeyName = alternateKeyName; + BypassBusinessLogicExecution = bypassBusinessLogicExecution; + BypassPowerAutomateFlows = bypassPowerAutomateFlows; CopySinkType = copySinkType ?? "DynamicsSink"; } @@ -53,5 +57,9 @@ internal DynamicsSink() public DataFactoryElement IgnoreNullValues { get; set; } /// The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). public DataFactoryElement AlternateKeyName { get; set; } + /// Controls the bypass of Dataverse custom business logic. Type: string (or Expression with resultType string). + public DataFactoryElement BypassBusinessLogicExecution { get; set; } + /// Controls the bypass of Power Automate flows. Default is false. Type: boolean (or Expression with resultType boolean). + public DataFactoryElement BypassPowerAutomateFlows { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumAuthenticationType.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumAuthenticationType.cs new file mode 100644 index 000000000000..0fc2453b6126 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumAuthenticationType.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// The authentication type to use. Type: string. Only used for V2. + public readonly partial struct GreenplumAuthenticationType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public GreenplumAuthenticationType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string BasicValue = "Basic"; + + /// Basic. + public static GreenplumAuthenticationType Basic { get; } = new GreenplumAuthenticationType(BasicValue); + /// Determines if two values are the same. + public static bool operator ==(GreenplumAuthenticationType left, GreenplumAuthenticationType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(GreenplumAuthenticationType left, GreenplumAuthenticationType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator GreenplumAuthenticationType(string value) => new GreenplumAuthenticationType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is GreenplumAuthenticationType other && Equals(other); + /// + public bool Equals(GreenplumAuthenticationType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.Serialization.cs index 27fece11f940..53aec1c724a6 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.Serialization.cs @@ -53,6 +53,46 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("encryptedCredential"u8); writer.WriteStringValue(EncryptedCredential); } + if (Optional.IsDefined(AuthenticationType)) + { + writer.WritePropertyName("authenticationType"u8); + writer.WriteStringValue(AuthenticationType.Value.ToString()); + } + if (Optional.IsDefined(Host)) + { + writer.WritePropertyName("host"u8); + JsonSerializer.Serialize(writer, Host); + } + if (Optional.IsDefined(Port)) + { + writer.WritePropertyName("port"u8); + JsonSerializer.Serialize(writer, Port); + } + if (Optional.IsDefined(Username)) + { + writer.WritePropertyName("username"u8); + JsonSerializer.Serialize(writer, Username); + } + if (Optional.IsDefined(Database)) + { + writer.WritePropertyName("database"u8); + JsonSerializer.Serialize(writer, Database); + } + if (Optional.IsDefined(SslMode)) + { + writer.WritePropertyName("sslMode"u8); + JsonSerializer.Serialize(writer, SslMode); + } + if (Optional.IsDefined(ConnectionTimeout)) + { + writer.WritePropertyName("connectionTimeout"u8); + JsonSerializer.Serialize(writer, ConnectionTimeout); + } + if (Optional.IsDefined(CommandTimeout)) + { + writer.WritePropertyName("commandTimeout"u8); + JsonSerializer.Serialize(writer, 
CommandTimeout); + } writer.WriteEndObject(); foreach (var item in AdditionalProperties) { @@ -97,6 +137,14 @@ internal static GreenplumLinkedService DeserializeGreenplumLinkedService(JsonEle DataFactoryElement connectionString = default; DataFactoryKeyVaultSecret password = default; string encryptedCredential = default; + GreenplumAuthenticationType? authenticationType = default; + DataFactoryElement host = default; + DataFactoryElement port = default; + DataFactoryElement username = default; + DataFactoryElement database = default; + DataFactoryElement sslMode = default; + DataFactoryElement connectionTimeout = default; + DataFactoryElement commandTimeout = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -192,6 +240,78 @@ internal static GreenplumLinkedService DeserializeGreenplumLinkedService(JsonEle encryptedCredential = property0.Value.GetString(); continue; } + if (property0.NameEquals("authenticationType"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authenticationType = new GreenplumAuthenticationType(property0.Value.GetString()); + continue; + } + if (property0.NameEquals("host"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + host = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("port"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + port = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("username"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + username = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("database"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + database = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("sslMode"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sslMode = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("connectionTimeout"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + connectionTimeout = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("commandTimeout"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + commandTimeout = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } } continue; } @@ -208,7 +328,15 @@ internal static GreenplumLinkedService DeserializeGreenplumLinkedService(JsonEle additionalProperties, connectionString, password, - encryptedCredential); + encryptedCredential, + authenticationType, + host, + port, + username, + database, + sslMode, + connectionTimeout, + commandTimeout); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.cs index 5b56bde0a440..09057763cc75 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.cs +++ 
b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/GreenplumLinkedService.cs @@ -31,11 +31,27 @@ public GreenplumLinkedService() /// An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. /// The Azure key vault secret reference of password in connection string. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal GreenplumLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryKeyVaultSecret password, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) + /// The authentication type to use. Type: string. Only used for V2. + /// Host name for connection. Type: string. Only used for V2. + /// The port for the connection. Type: integer. Only used for V2. + /// Username for authentication. Type: string. Only used for V2. + /// Database name for connection. Type: string. Only used for V2. + /// SSL mode for connection. Type: integer. 0: disable, 1: allow, 2: prefer, 3: require, 4: verify-ca, 5: verify-full. Only used for V2. + /// The time to wait (in seconds) while trying to establish a connection before terminating the attempt and generating an error. Type: integer. Only used for V2. + /// The time to wait (in seconds) while trying to execute a command before terminating the attempt and generating an error. Set to zero for infinity. Type: integer. Only used for V2. + internal GreenplumLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryKeyVaultSecret password, string encryptedCredential, GreenplumAuthenticationType? authenticationType, DataFactoryElement host, DataFactoryElement port, DataFactoryElement username, DataFactoryElement database, DataFactoryElement sslMode, DataFactoryElement connectionTimeout, DataFactoryElement commandTimeout) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) { ConnectionString = connectionString; Password = password; EncryptedCredential = encryptedCredential; + AuthenticationType = authenticationType; + Host = host; + Port = port; + Username = username; + Database = database; + SslMode = sslMode; + ConnectionTimeout = connectionTimeout; + CommandTimeout = commandTimeout; LinkedServiceType = linkedServiceType ?? "Greenplum"; } @@ -45,5 +61,21 @@ internal GreenplumLinkedService(string linkedServiceType, string linkedServiceVe public DataFactoryKeyVaultSecret Password { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. public string EncryptedCredential { get; set; } + /// The authentication type to use. Type: string. Only used for V2. + public GreenplumAuthenticationType? AuthenticationType { get; set; } + /// Host name for connection. Type: string. Only used for V2. + public DataFactoryElement Host { get; set; } + /// The port for the connection. Type: integer. Only used for V2. 
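A hypothetical sketch of a Greenplum linked service built from the new V2-style connection properties documented above; the host, user, and timeout values are placeholders, Basic is the only GreenplumAuthenticationType value surfaced by this change, and the implicit string/int conversions to DataFactoryElement from Azure.Core.Expressions.DataFactory are assumed:

using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

// Individual V2 connection properties instead of a single ODBC connection string.
// SslMode 3 corresponds to "require" per the doc comment above.
var greenplum = new GreenplumLinkedService
{
    AuthenticationType = GreenplumAuthenticationType.Basic,
    Host = "greenplum.contoso.internal",
    Port = 5432,
    Username = "etl_reader",
    Database = "analytics",
    SslMode = 3,
    ConnectionTimeout = 30,
    CommandTimeout = 120
    // Password (a DataFactoryKeyVaultSecret) would normally be set here as well.
};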
+ public DataFactoryElement Port { get; set; } + /// Username for authentication. Type: string. Only used for V2. + public DataFactoryElement Username { get; set; } + /// Database name for connection. Type: string. Only used for V2. + public DataFactoryElement Database { get; set; } + /// SSL mode for connection. Type: integer. 0: disable, 1: allow, 2: prefer, 3: require, 4: verify-ca, 5: verify-full. Only used for V2. + public DataFactoryElement SslMode { get; set; } + /// The time to wait (in seconds) while trying to establish a connection before terminating the attempt and generating an error. Type: integer. Only used for V2. + public DataFactoryElement ConnectionTimeout { get; set; } + /// The time to wait (in seconds) while trying to execute a command before terminating the attempt and generating an error. Set to zero for infinity. Type: integer. Only used for V2. + public DataFactoryElement CommandTimeout { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.Serialization.cs index d755f969c71d..5652fa04657b 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.Serialization.cs @@ -76,6 +76,7 @@ internal static ImportSettings DeserializeImportSettings(JsonElement element, Mo { case "AzureDatabricksDeltaLakeImportCommand": return AzureDatabricksDeltaLakeImportCommand.DeserializeAzureDatabricksDeltaLakeImportCommand(element, options); case "SnowflakeImportCopyCommand": return SnowflakeImportCopyCommand.DeserializeSnowflakeImportCopyCommand(element, options); + case "TeradataImportCommand": return TeradataImportCommand.DeserializeTeradataImportCommand(element, options); } } return UnknownImportSettings.DeserializeUnknownImportSettings(element, options); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.cs index 8ac8805942ec..f56ba646c16b 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ImportSettings.cs @@ -13,7 +13,7 @@ namespace Azure.ResourceManager.DataFactory.Models /// /// Import command settings. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include and . + /// The available derived classes include , and . /// public abstract partial class ImportSettings { diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLinkedService.cs index 92471001ad9c..70e03e9fac36 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLinkedService.cs @@ -11,13 +11,13 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// Microsoft Fabric LakeHouse linked service. + /// Microsoft Fabric Lakehouse linked service. 
public partial class LakeHouseLinkedService : DataFactoryLinkedServiceProperties { /// Initializes a new instance of . public LakeHouseLinkedService() { - LinkedServiceType = "LakeHouse"; + LinkedServiceType = "Lakehouse"; } /// Initializes a new instance of . @@ -29,9 +29,9 @@ public LakeHouseLinkedService() /// List of tags that can be used for describing the linked service. /// Additional Properties. /// The ID of Microsoft Fabric workspace. Type: string (or Expression with resultType string). - /// The ID of Microsoft Fabric LakeHouse artifact. Type: string (or Expression with resultType string). - /// The ID of the application used to authenticate against Microsoft Fabric LakeHouse. Type: string (or Expression with resultType string). - /// The Key of the application used to authenticate against Microsoft Fabric LakeHouse. + /// The ID of Microsoft Fabric Lakehouse artifact. Type: string (or Expression with resultType string). + /// The ID of the application used to authenticate against Microsoft Fabric Lakehouse. Type: string (or Expression with resultType string). + /// The Key of the application used to authenticate against Microsoft Fabric Lakehouse. /// The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. /// The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). @@ -46,16 +46,16 @@ internal LakeHouseLinkedService(string linkedServiceType, string linkedServiceVe EncryptedCredential = encryptedCredential; ServicePrincipalCredentialType = servicePrincipalCredentialType; ServicePrincipalCredential = servicePrincipalCredential; - LinkedServiceType = linkedServiceType ?? "LakeHouse"; + LinkedServiceType = linkedServiceType ?? "Lakehouse"; } /// The ID of Microsoft Fabric workspace. Type: string (or Expression with resultType string). public DataFactoryElement WorkspaceId { get; set; } - /// The ID of Microsoft Fabric LakeHouse artifact. Type: string (or Expression with resultType string). + /// The ID of Microsoft Fabric Lakehouse artifact. Type: string (or Expression with resultType string). public DataFactoryElement ArtifactId { get; set; } - /// The ID of the application used to authenticate against Microsoft Fabric LakeHouse. Type: string (or Expression with resultType string). + /// The ID of the application used to authenticate against Microsoft Fabric Lakehouse. Type: string (or Expression with resultType string). public DataFactoryElement ServicePrincipalId { get; set; } - /// The Key of the application used to authenticate against Microsoft Fabric LakeHouse. + /// The Key of the application used to authenticate against Microsoft Fabric Lakehouse. public DataFactorySecret ServicePrincipalKey { get; set; } /// The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
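A hypothetical sketch of the Fabric Lakehouse linked service configured for service-principal key authentication; the GUIDs and secret are placeholders, and DataFactorySecretString from Azure.Core.Expressions.DataFactory is assumed as the secret wrapper:

using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

// Fabric Lakehouse linked service; the type discriminator now serializes as "Lakehouse".
var lakehouseService = new LakeHouseLinkedService
{
    WorkspaceId = "<fabric-workspace-guid>",
    ArtifactId = "<lakehouse-artifact-guid>",
    ServicePrincipalId = "<app-client-id>",
    ServicePrincipalKey = new DataFactorySecretString("<client-secret>"),
    Tenant = "<tenant-guid>"
};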
public DataFactoryElement Tenant { get; set; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLocation.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLocation.cs index c61c18b31544..5bf4824700f5 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLocation.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseLocation.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// The location of Microsoft Fabric LakeHouse Files dataset. + /// The location of Microsoft Fabric Lakehouse Files dataset. public partial class LakeHouseLocation : DatasetLocation { /// Initializes a new instance of . diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseReadSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseReadSettings.cs index 927838280235..2110bb88216d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseReadSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseReadSettings.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// Microsoft Fabric LakeHouse Files read settings. + /// Microsoft Fabric Lakehouse Files read settings. public partial class LakeHouseReadSettings : StoreReadSettings { /// Initializes a new instance of . @@ -26,8 +26,8 @@ public LakeHouseReadSettings() /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// Additional Properties. /// If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). - /// Microsoft Fabric LakeHouse Files wildcardFolderPath. Type: string (or Expression with resultType string). - /// Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or Expression with resultType string). + /// Microsoft Fabric Lakehouse Files wildcardFolderPath. Type: string (or Expression with resultType string). + /// Microsoft Fabric Lakehouse Files wildcardFileName. Type: string (or Expression with resultType string). /// Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). /// Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). /// Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). @@ -50,9 +50,9 @@ internal LakeHouseReadSettings(string storeReadSettingsType, DataFactoryElement< /// If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). public DataFactoryElement Recursive { get; set; } - /// Microsoft Fabric LakeHouse Files wildcardFolderPath. Type: string (or Expression with resultType string). + /// Microsoft Fabric Lakehouse Files wildcardFolderPath. Type: string (or Expression with resultType string). public DataFactoryElement WildcardFolderPath { get; set; } - /// Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or Expression with resultType string). + /// Microsoft Fabric Lakehouse Files wildcardFileName. Type: string (or Expression with resultType string). 
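A short sketch of the Lakehouse Files read settings shown above, using wildcard matching; the folder and file patterns are placeholders:

using Azure.ResourceManager.DataFactory.Models;

// Recursively read every Parquet file under a wildcard folder in Lakehouse Files.
var lakehouseRead = new LakeHouseReadSettings
{
    Recursive = true,
    WildcardFolderPath = "raw/2024/*",
    WildcardFileName = "*.parquet"
};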
public DataFactoryElement WildcardFileName { get; set; } /// Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). public DataFactoryElement FileListPath { get; set; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableDataset.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableDataset.cs index 5caf9c31aa99..41980e4db68d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableDataset.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableDataset.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// Microsoft Fabric LakeHouse Table. + /// Microsoft Fabric Lakehouse Table. public partial class LakeHouseTableDataset : DataFactoryDatasetProperties { /// Initializes a new instance of . @@ -21,7 +21,7 @@ public LakeHouseTableDataset(DataFactoryLinkedServiceReference linkedServiceName { Argument.AssertNotNull(linkedServiceName, nameof(linkedServiceName)); - DatasetType = "LakeHouseTable"; + DatasetType = "LakehouseTable"; } /// Initializes a new instance of . @@ -34,13 +34,13 @@ public LakeHouseTableDataset(DataFactoryLinkedServiceReference linkedServiceName /// List of tags that can be used for describing the Dataset. /// The folder that this Dataset is in. If not specified, Dataset will appear at the root level. /// Additional Properties. - /// The schema name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType string). - /// The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType string). + /// The schema name of Microsoft Fabric Lakehouse Table. Type: string (or Expression with resultType string). + /// The name of Microsoft Fabric Lakehouse Table. Type: string (or Expression with resultType string). internal LakeHouseTableDataset(string datasetType, string description, DataFactoryElement> structure, DataFactoryElement> schema, DataFactoryLinkedServiceReference linkedServiceName, IDictionary parameters, IList annotations, DatasetFolder folder, IDictionary additionalProperties, DataFactoryElement schemaTypePropertiesSchema, DataFactoryElement table) : base(datasetType, description, structure, schema, linkedServiceName, parameters, annotations, folder, additionalProperties) { SchemaTypePropertiesSchema = schemaTypePropertiesSchema; Table = table; - DatasetType = datasetType ?? "LakeHouseTable"; + DatasetType = datasetType ?? "LakehouseTable"; } /// Initializes a new instance of for deserialization. @@ -48,9 +48,9 @@ internal LakeHouseTableDataset() { } - /// The schema name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType string). + /// The schema name of Microsoft Fabric Lakehouse Table. Type: string (or Expression with resultType string). public DataFactoryElement SchemaTypePropertiesSchema { get; set; } - /// The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType string). + /// The name of Microsoft Fabric Lakehouse Table. Type: string (or Expression with resultType string). 
public DataFactoryElement Table { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSink.cs index 99f48bd3d125..2a8d214daa22 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSink.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// A copy activity for Microsoft Fabric LakeHouse Table sink. + /// A copy activity for Microsoft Fabric Lakehouse Table sink. public partial class LakeHouseTableSink : CopySink { /// Initializes a new instance of . @@ -29,7 +29,7 @@ public LakeHouseTableSink() /// The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// Additional Properties. - /// The type of table action for LakeHouse Table sink. Possible values include: "None", "Append", "Overwrite". + /// The type of table action for Lakehouse Table sink. Possible values include: "None", "Append", "Overwrite". /// Create partitions in folder structure based on one or multiple columns. Each distinct column value (pair) will be a new partition. Possible values include: "None", "PartitionByKey". /// Specify the partition column names from sink columns. Type: array of objects (or Expression with resultType array of objects). internal LakeHouseTableSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement tableActionOption, DataFactoryElement partitionOption, BinaryData partitionNameList) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) @@ -40,7 +40,7 @@ internal LakeHouseTableSink(string copySinkType, DataFactoryElement writeBa CopySinkType = copySinkType ?? "LakeHouseTableSink"; } - /// The type of table action for LakeHouse Table sink. Possible values include: "None", "Append", "Overwrite". + /// The type of table action for Lakehouse Table sink. Possible values include: "None", "Append", "Overwrite". public DataFactoryElement TableActionOption { get; set; } /// Create partitions in folder structure based on one or multiple columns. Each distinct column value (pair) will be a new partition. Possible values include: "None", "PartitionByKey". public DataFactoryElement PartitionOption { get; set; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSource.cs index ea21f4e175d9..d444c36a4a98 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseTableSource.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// A copy activity source for Microsoft Fabric LakeHouse Table. + /// A copy activity source for Microsoft Fabric Lakehouse Table. 
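A hypothetical fragment pairing the Lakehouse table dataset with the table sink; the linked service reference, schema, and table names are placeholders, and the DataFactoryLinkedServiceReference constructor shown is assumed from Azure.Core.Expressions.DataFactory:

using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

// The dataset now serializes with type "LakehouseTable" rather than "LakeHouseTable".
var lakehouseReference = new DataFactoryLinkedServiceReference(
    DataFactoryLinkedServiceReferenceType.LinkedServiceReference, "FabricLakehouseLinkedService");

var lakehouseTable = new LakeHouseTableDataset(lakehouseReference)
{
    SchemaTypePropertiesSchema = "dbo",
    Table = "sales_orders"
};

// Append copied rows into the table; documented values are "None", "Append", "Overwrite".
var lakehouseSink = new LakeHouseTableSink
{
    TableActionOption = "Append"
};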
public partial class LakeHouseTableSource : CopyActivitySource { /// Initializes a new instance of . diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs index 636d6c758f4d..86a4bd8c88d4 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs @@ -11,7 +11,7 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// Microsoft Fabric LakeHouse Files write settings. + /// Microsoft Fabric Lakehouse Files write settings. public partial class LakeHouseWriteSettings : StoreWriteSettings { /// Initializes a new instance of . diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleAuthenticationType.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleAuthenticationType.cs new file mode 100644 index 000000000000..cc6cab6ed098 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleAuthenticationType.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// Authentication type for connecting to the Oracle database. Only used for Version 2.0. + public readonly partial struct OracleAuthenticationType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public OracleAuthenticationType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string BasicValue = "Basic"; + + /// Basic. + public static OracleAuthenticationType Basic { get; } = new OracleAuthenticationType(BasicValue); + /// Determines if two values are the same. + public static bool operator ==(OracleAuthenticationType left, OracleAuthenticationType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(OracleAuthenticationType left, OracleAuthenticationType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator OracleAuthenticationType(string value) => new OracleAuthenticationType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is OracleAuthenticationType other && Equals(other); + /// + public bool Equals(OracleAuthenticationType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.Serialization.cs index d804dce6e7fb..1d2b0c765860 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.Serialization.cs @@ -38,13 +38,86 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri base.JsonModelWriteCore(writer, options); writer.WritePropertyName("typeProperties"u8); writer.WriteStartObject(); - writer.WritePropertyName("connectionString"u8); - JsonSerializer.Serialize(writer, ConnectionString); + if (Optional.IsDefined(ConnectionString)) + { + writer.WritePropertyName("connectionString"u8); + JsonSerializer.Serialize(writer, ConnectionString); + } + if (Optional.IsDefined(Server)) + { + writer.WritePropertyName("server"u8); + JsonSerializer.Serialize(writer, Server); + } + if (Optional.IsDefined(AuthenticationType)) + { + writer.WritePropertyName("authenticationType"u8); + writer.WriteStringValue(AuthenticationType.Value.ToString()); + } + if (Optional.IsDefined(Username)) + { + writer.WritePropertyName("username"u8); + JsonSerializer.Serialize(writer, Username); + } if (Optional.IsDefined(Password)) { writer.WritePropertyName("password"u8); JsonSerializer.Serialize(writer, Password); } + if (Optional.IsDefined(EncryptionClient)) + { + writer.WritePropertyName("encryptionClient"u8); + JsonSerializer.Serialize(writer, EncryptionClient); + } + if (Optional.IsDefined(EncryptionTypesClient)) + { + writer.WritePropertyName("encryptionTypesClient"u8); + JsonSerializer.Serialize(writer, EncryptionTypesClient); + } + if (Optional.IsDefined(CryptoChecksumClient)) + { + writer.WritePropertyName("cryptoChecksumClient"u8); + JsonSerializer.Serialize(writer, CryptoChecksumClient); + } + if (Optional.IsDefined(CryptoChecksumTypesClient)) + { + writer.WritePropertyName("cryptoChecksumTypesClient"u8); + JsonSerializer.Serialize(writer, CryptoChecksumTypesClient); + } + if (Optional.IsDefined(InitialLobFetchSize)) + { + writer.WritePropertyName("initialLobFetchSize"u8); + JsonSerializer.Serialize(writer, InitialLobFetchSize); + } + if (Optional.IsDefined(FetchSize)) + { + writer.WritePropertyName("fetchSize"u8); + JsonSerializer.Serialize(writer, FetchSize); + } + if (Optional.IsDefined(StatementCacheSize)) + { + writer.WritePropertyName("statementCacheSize"u8); + JsonSerializer.Serialize(writer, StatementCacheSize); + } + if (Optional.IsDefined(InitializationString)) + { + writer.WritePropertyName("initializationString"u8); + JsonSerializer.Serialize(writer, InitializationString); + } + if (Optional.IsDefined(EnableBulkLoad)) + { + writer.WritePropertyName("enableBulkLoad"u8); + JsonSerializer.Serialize(writer, EnableBulkLoad); + } + if (Optional.IsDefined(SupportV1DataTypes)) + { + writer.WritePropertyName("supportV1DataTypes"u8); + JsonSerializer.Serialize(writer, SupportV1DataTypes); + } + if (Optional.IsDefined(FetchTswtzAsTimestamp)) + { + writer.WritePropertyName("fetchTswtzAsTimestamp"u8); + JsonSerializer.Serialize(writer, FetchTswtzAsTimestamp); + } if (Optional.IsDefined(EncryptedCredential)) { writer.WritePropertyName("encryptedCredential"u8); @@ -92,7 
+165,21 @@ internal static OracleLinkedService DeserializeOracleLinkedService(JsonElement e IDictionary parameters = default; IList annotations = default; DataFactoryElement connectionString = default; + DataFactoryElement server = default; + OracleAuthenticationType? authenticationType = default; + DataFactoryElement username = default; DataFactoryKeyVaultSecret password = default; + DataFactoryElement encryptionClient = default; + DataFactoryElement encryptionTypesClient = default; + DataFactoryElement cryptoChecksumClient = default; + DataFactoryElement cryptoChecksumTypesClient = default; + DataFactoryElement initialLobFetchSize = default; + DataFactoryElement fetchSize = default; + DataFactoryElement statementCacheSize = default; + DataFactoryElement initializationString = default; + DataFactoryElement enableBulkLoad = default; + DataFactoryElement supportV1DataTypes = default; + DataFactoryElement fetchTswtzAsTimestamp = default; string encryptedCredential = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); @@ -168,9 +255,40 @@ internal static OracleLinkedService DeserializeOracleLinkedService(JsonElement e { if (property0.NameEquals("connectionString"u8)) { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } connectionString = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } + if (property0.NameEquals("server"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + server = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("authenticationType"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authenticationType = new OracleAuthenticationType(property0.Value.GetString()); + continue; + } + if (property0.NameEquals("username"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + username = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } if (property0.NameEquals("password"u8)) { if (property0.Value.ValueKind == JsonValueKind.Null) @@ -180,6 +298,105 @@ internal static OracleLinkedService DeserializeOracleLinkedService(JsonElement e password = JsonSerializer.Deserialize(property0.Value.GetRawText()); continue; } + if (property0.NameEquals("encryptionClient"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encryptionClient = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("encryptionTypesClient"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + encryptionTypesClient = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("cryptoChecksumClient"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + cryptoChecksumClient = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("cryptoChecksumTypesClient"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + cryptoChecksumTypesClient = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("initialLobFetchSize"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + initialLobFetchSize = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("fetchSize"u8)) + { + if 
(property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + fetchSize = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("statementCacheSize"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + statementCacheSize = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("initializationString"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + initializationString = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("enableBulkLoad"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableBulkLoad = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("supportV1DataTypes"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + supportV1DataTypes = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("fetchTswtzAsTimestamp"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + fetchTswtzAsTimestamp = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } if (property0.NameEquals("encryptedCredential"u8)) { encryptedCredential = property0.Value.GetString(); @@ -200,7 +417,21 @@ internal static OracleLinkedService DeserializeOracleLinkedService(JsonElement e annotations ?? new ChangeTrackingList(), additionalProperties, connectionString, + server, + authenticationType, + username, password, + encryptionClient, + encryptionTypesClient, + cryptoChecksumClient, + cryptoChecksumTypesClient, + initialLobFetchSize, + fetchSize, + statementCacheSize, + initializationString, + enableBulkLoad, + supportV1DataTypes, + fetchTswtzAsTimestamp, encryptedCredential); } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.cs index 326260364258..8529c36c162e 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/OracleLinkedService.cs @@ -11,17 +11,12 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// Oracle database. + /// Oracle database. This linked service has supported version property. The Version 1.0 is scheduled for deprecation while your pipeline will continue to run after EOL but without any bug fix or new features. public partial class OracleLinkedService : DataFactoryLinkedServiceProperties { /// Initializes a new instance of . - /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - /// is null. - public OracleLinkedService(DataFactoryElement connectionString) + public OracleLinkedService() { - Argument.AssertNotNull(connectionString, nameof(connectionString)); - - ConnectionString = connectionString; LinkedServiceType = "Oracle"; } @@ -33,26 +28,77 @@ public OracleLinkedService(DataFactoryElement connectionString) /// Parameters for linked service. /// List of tags that can be used for describing the linked service. /// Additional Properties. - /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Only used for Version 1.0. 
+ /// The location of Oracle database you want to connect to, the supported forms include connector descriptor, Easy Connect (Plus) Naming and Oracle Net Services Name (Only self-hosted IR). Type: string. Only used for Version 2.0. + /// Authentication type for connecting to the Oracle database. Only used for Version 2.0. + /// The Oracle database username. Type: string. Only used for Version 2.0. /// The Azure key vault secret reference of password in connection string. + /// Specifies the encryption client behavior. Supported values are accepted, rejected, requested or required, default value is required. Type: string. Only used for Version 2.0. + /// Specifies the encryption algorithms that client can use. Supported values are AES128, AES192, AES256, 3DES112, 3DES168, default value is (AES256). Type: string. Only used for Version 2.0. + /// Specifies the desired data integrity behavior when this client connects to a server. Supported values are accepted, rejected, requested or required, default value is required. Type: string. Only used for Version 2.0. + /// Specifies the crypto-checksum algorithms that client can use. Supported values are SHA1, SHA256, SHA384, SHA512, default value is (SHA512). Type: string. Only used for Version 2.0. + /// Specifies the amount that the source initially fetches for LOB columns, default value is 0. Type: integer. Only used for Version 2.0. + /// Specifies the number of bytes that the driver allocates to fetch the data in one database round-trip, default value is 10485760. Type: integer. Only used for Version 2.0. + /// Specifies the number of cursors or statements to be cached for each database connection, default value is 0. Type: integer. Only used for Version 2.0. + /// Specifies a command that is issued immediately after connecting to the database to manage session settings. Type: string. Only used for Version 2.0. + /// Specifies whether to use bulk copy or batch insert when loading data into the database, default value is true. Type: boolean. Only used for Version 2.0. + /// Specifies whether to use the Version 1.0 data type mappings. Do not set this to true unless you want to keep backward compatibility with Version 1.0's data type mappings, default value is false. Type: boolean. Only used for Version 2.0. + /// Specifies whether the driver returns column value with the TIMESTAMP WITH TIME ZONE data type as DateTime or string. This setting is ignored if supportV1DataTypes is not true, default value is true. Type: boolean. Only used for Version 2.0. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal OracleLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryKeyVaultSecret password, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) + internal OracleLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryElement server, OracleAuthenticationType? 
authenticationType, DataFactoryElement username, DataFactoryKeyVaultSecret password, DataFactoryElement encryptionClient, DataFactoryElement encryptionTypesClient, DataFactoryElement cryptoChecksumClient, DataFactoryElement cryptoChecksumTypesClient, DataFactoryElement initialLobFetchSize, DataFactoryElement fetchSize, DataFactoryElement statementCacheSize, DataFactoryElement initializationString, DataFactoryElement enableBulkLoad, DataFactoryElement supportV1DataTypes, DataFactoryElement fetchTswtzAsTimestamp, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) { ConnectionString = connectionString; + Server = server; + AuthenticationType = authenticationType; + Username = username; Password = password; + EncryptionClient = encryptionClient; + EncryptionTypesClient = encryptionTypesClient; + CryptoChecksumClient = cryptoChecksumClient; + CryptoChecksumTypesClient = cryptoChecksumTypesClient; + InitialLobFetchSize = initialLobFetchSize; + FetchSize = fetchSize; + StatementCacheSize = statementCacheSize; + InitializationString = initializationString; + EnableBulkLoad = enableBulkLoad; + SupportV1DataTypes = supportV1DataTypes; + FetchTswtzAsTimestamp = fetchTswtzAsTimestamp; EncryptedCredential = encryptedCredential; LinkedServiceType = linkedServiceType ?? "Oracle"; } - /// Initializes a new instance of for deserialization. - internal OracleLinkedService() - { - } - - /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Only used for Version 1.0. public DataFactoryElement ConnectionString { get; set; } + /// The location of Oracle database you want to connect to, the supported forms include connector descriptor, Easy Connect (Plus) Naming and Oracle Net Services Name (Only self-hosted IR). Type: string. Only used for Version 2.0. + public DataFactoryElement Server { get; set; } + /// Authentication type for connecting to the Oracle database. Only used for Version 2.0. + public OracleAuthenticationType? AuthenticationType { get; set; } + /// The Oracle database username. Type: string. Only used for Version 2.0. + public DataFactoryElement Username { get; set; } /// The Azure key vault secret reference of password in connection string. public DataFactoryKeyVaultSecret Password { get; set; } + /// Specifies the encryption client behavior. Supported values are accepted, rejected, requested or required, default value is required. Type: string. Only used for Version 2.0. + public DataFactoryElement EncryptionClient { get; set; } + /// Specifies the encryption algorithms that client can use. Supported values are AES128, AES192, AES256, 3DES112, 3DES168, default value is (AES256). Type: string. Only used for Version 2.0. + public DataFactoryElement EncryptionTypesClient { get; set; } + /// Specifies the desired data integrity behavior when this client connects to a server. Supported values are accepted, rejected, requested or required, default value is required. Type: string. Only used for Version 2.0. + public DataFactoryElement CryptoChecksumClient { get; set; } + /// Specifies the crypto-checksum algorithms that client can use. Supported values are SHA1, SHA256, SHA384, SHA512, default value is (SHA512). Type: string. Only used for Version 2.0. 
+ public DataFactoryElement CryptoChecksumTypesClient { get; set; } + /// Specifies the amount that the source initially fetches for LOB columns, default value is 0. Type: integer. Only used for Version 2.0. + public DataFactoryElement InitialLobFetchSize { get; set; } + /// Specifies the number of bytes that the driver allocates to fetch the data in one database round-trip, default value is 10485760. Type: integer. Only used for Version 2.0. + public DataFactoryElement FetchSize { get; set; } + /// Specifies the number of cursors or statements to be cached for each database connection, default value is 0. Type: integer. Only used for Version 2.0. + public DataFactoryElement StatementCacheSize { get; set; } + /// Specifies a command that is issued immediately after connecting to the database to manage session settings. Type: string. Only used for Version 2.0. + public DataFactoryElement InitializationString { get; set; } + /// Specifies whether to use bulk copy or batch insert when loading data into the database, default value is true. Type: boolean. Only used for Version 2.0. + public DataFactoryElement EnableBulkLoad { get; set; } + /// Specifies whether to use the Version 1.0 data type mappings. Do not set this to true unless you want to keep backward compatibility with Version 1.0's data type mappings, default value is false. Type: boolean. Only used for Version 2.0. + public DataFactoryElement SupportV1DataTypes { get; set; } + /// Specifies whether the driver returns column value with the TIMESTAMP WITH TIME ZONE data type as DateTime or string. This setting is ignored if supportV1DataTypes is not true, default value is true. Type: boolean. Only used for Version 2.0. + public DataFactoryElement FetchTswtzAsTimestamp { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
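A hypothetical sketch of an Oracle linked service in its Version 2.0 shape, enabled by the new parameterless constructor; the Easy Connect address and user name are placeholders, and the tuning values simply echo the documented defaults:

using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

// Version 2.0-style Oracle linked service: individual properties instead of a connection string.
var oracle = new OracleLinkedService
{
    Server = "myhost.contoso.com:1521/orclpdb1",   // Easy Connect (Plus) Naming form
    AuthenticationType = OracleAuthenticationType.Basic,
    Username = "scott",
    // Password (a DataFactoryKeyVaultSecret) would normally be supplied here.
    EnableBulkLoad = true,        // documented default
    FetchSize = 10485760,         // documented default, in bytes
    SupportV1DataTypes = false    // documented default
};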
public string EncryptedCredential { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.Serialization.cs index d56f2166ec34..17aa2fc87331 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.Serialization.cs @@ -40,8 +40,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WriteStartObject(); writer.WritePropertyName("host"u8); JsonSerializer.Serialize(writer, Host); - writer.WritePropertyName("serverVersion"u8); - JsonSerializer.Serialize(writer, ServerVersion); + if (Optional.IsDefined(ServerVersion)) + { + writer.WritePropertyName("serverVersion"u8); + JsonSerializer.Serialize(writer, ServerVersion); + } writer.WritePropertyName("catalog"u8); JsonSerializer.Serialize(writer, Catalog); if (Optional.IsDefined(Port)) @@ -66,6 +69,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("enableSsl"u8); JsonSerializer.Serialize(writer, EnableSsl); } + if (Optional.IsDefined(EnableServerCertificateValidation)) + { + writer.WritePropertyName("enableServerCertificateValidation"u8); + JsonSerializer.Serialize(writer, EnableServerCertificateValidation); + } if (Optional.IsDefined(TrustedCertPath)) { writer.WritePropertyName("trustedCertPath"u8); @@ -145,6 +153,7 @@ internal static PrestoLinkedService DeserializePrestoLinkedService(JsonElement e DataFactoryElement username = default; DataFactorySecret password = default; DataFactoryElement enableSsl = default; + DataFactoryElement enableServerCertificateValidation = default; DataFactoryElement trustedCertPath = default; DataFactoryElement useSystemTrustStore = default; DataFactoryElement allowHostNameCNMismatch = default; @@ -230,6 +239,10 @@ internal static PrestoLinkedService DeserializePrestoLinkedService(JsonElement e } if (property0.NameEquals("serverVersion"u8)) { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } serverVersion = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } @@ -279,6 +292,15 @@ internal static PrestoLinkedService DeserializePrestoLinkedService(JsonElement e enableSsl = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } + if (property0.NameEquals("enableServerCertificateValidation"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableServerCertificateValidation = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } if (property0.NameEquals("trustedCertPath"u8)) { if (property0.Value.ValueKind == JsonValueKind.Null) @@ -351,6 +373,7 @@ internal static PrestoLinkedService DeserializePrestoLinkedService(JsonElement e username, password, enableSsl, + enableServerCertificateValidation, trustedCertPath, useSystemTrustStore, allowHostNameCNMismatch, diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.cs index c269fa6541fe..9f9ac77be9af 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.cs +++ 
b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/PrestoLinkedService.cs @@ -11,23 +11,20 @@ namespace Azure.ResourceManager.DataFactory.Models { - /// Presto server linked service. + /// Presto server linked service. This linked service has supported version property. The Version 1.0 is scheduled for deprecation while your pipeline will continue to run after EOL but without any bug fix or new features. public partial class PrestoLinkedService : DataFactoryLinkedServiceProperties { /// Initializes a new instance of . /// The IP address or host name of the Presto server. (i.e. 192.168.222.160). - /// The version of the Presto server. (i.e. 0.148-t). /// The catalog context for all request against the server. /// The authentication mechanism used to connect to the Presto server. - /// , or is null. - public PrestoLinkedService(DataFactoryElement host, DataFactoryElement serverVersion, DataFactoryElement catalog, PrestoAuthenticationType authenticationType) + /// or is null. + public PrestoLinkedService(DataFactoryElement host, DataFactoryElement catalog, PrestoAuthenticationType authenticationType) { Argument.AssertNotNull(host, nameof(host)); - Argument.AssertNotNull(serverVersion, nameof(serverVersion)); Argument.AssertNotNull(catalog, nameof(catalog)); Host = host; - ServerVersion = serverVersion; Catalog = catalog; AuthenticationType = authenticationType; LinkedServiceType = "Presto"; @@ -42,20 +39,21 @@ public PrestoLinkedService(DataFactoryElement host, DataFactoryElement List of tags that can be used for describing the linked service. /// Additional Properties. /// The IP address or host name of the Presto server. (i.e. 192.168.222.160). - /// The version of the Presto server. (i.e. 0.148-t). + /// The version of the Presto server. (i.e. 0.148-t) Only used for Version 1.0. /// The catalog context for all request against the server. - /// The TCP port that the Presto server uses to listen for client connections. The default value is 8080. + /// The TCP port that the Presto server uses to listen for client connections. The default value is 8080 when disable SSL, default value is 443 when enable SSL. /// The authentication mechanism used to connect to the Presto server. /// The user name used to connect to the Presto server. /// The password corresponding to the user name. - /// Specifies whether the connections to the server are encrypted using SSL. The default value is false. - /// The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - /// Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. - /// Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. - /// Specifies whether to allow self-signed certificates from the server. The default value is false. - /// The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. + /// Specifies whether the connections to the server are encrypted using SSL. The default value for legacy version is False. The default value for version 2.0 is True. + /// Specifies whether the connections to the server will validate server certificate, the default value is True. 
Only used for Version 2.0. + /// The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. Only used for Version 1.0. + /// Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. Only used for Version 1.0. + /// Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. Only used for Version 1.0. + /// Specifies whether to allow self-signed certificates from the server. The default value is false. Only used for Version 1.0. + /// The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value for Version 1.0 is the client system time zone. The default value for Version 2.0 is server system timeZone. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal PrestoLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement host, DataFactoryElement serverVersion, DataFactoryElement catalog, DataFactoryElement port, PrestoAuthenticationType authenticationType, DataFactoryElement username, DataFactorySecret password, DataFactoryElement enableSsl, DataFactoryElement trustedCertPath, DataFactoryElement useSystemTrustStore, DataFactoryElement allowHostNameCNMismatch, DataFactoryElement allowSelfSignedServerCert, DataFactoryElement timeZoneId, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) + internal PrestoLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement host, DataFactoryElement serverVersion, DataFactoryElement catalog, DataFactoryElement port, PrestoAuthenticationType authenticationType, DataFactoryElement username, DataFactorySecret password, DataFactoryElement enableSsl, DataFactoryElement enableServerCertificateValidation, DataFactoryElement trustedCertPath, DataFactoryElement useSystemTrustStore, DataFactoryElement allowHostNameCNMismatch, DataFactoryElement allowSelfSignedServerCert, DataFactoryElement timeZoneId, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) { Host = host; ServerVersion = serverVersion; @@ -65,6 +63,7 @@ internal PrestoLinkedService(string linkedServiceType, string linkedServiceVersi Username = username; Password = password; EnableSsl = enableSsl; + EnableServerCertificateValidation = enableServerCertificateValidation; TrustedCertPath = trustedCertPath; UseSystemTrustStore = useSystemTrustStore; AllowHostNameCNMismatch = allowHostNameCNMismatch; @@ -81,11 +80,11 @@ internal PrestoLinkedService() /// The IP address or host name of the Presto server. (i.e. 192.168.222.160). public DataFactoryElement Host { get; set; } - /// The version of the Presto server. (i.e. 0.148-t). + /// The version of the Presto server. (i.e. 
0.148-t) Only used for Version 1.0. public DataFactoryElement ServerVersion { get; set; } /// The catalog context for all request against the server. public DataFactoryElement Catalog { get; set; } - /// The TCP port that the Presto server uses to listen for client connections. The default value is 8080. + /// The TCP port that the Presto server uses to listen for client connections. The default value is 8080 when disable SSL, default value is 443 when enable SSL. public DataFactoryElement Port { get; set; } /// The authentication mechanism used to connect to the Presto server. public PrestoAuthenticationType AuthenticationType { get; set; } @@ -93,17 +92,19 @@ internal PrestoLinkedService() public DataFactoryElement Username { get; set; } /// The password corresponding to the user name. public DataFactorySecret Password { get; set; } - /// Specifies whether the connections to the server are encrypted using SSL. The default value is false. + /// Specifies whether the connections to the server are encrypted using SSL. The default value for legacy version is False. The default value for version 2.0 is True. public DataFactoryElement EnableSsl { get; set; } - /// The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + /// Specifies whether the connections to the server will validate server certificate, the default value is True. Only used for Version 2.0. + public DataFactoryElement EnableServerCertificateValidation { get; set; } + /// The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. Only used for Version 1.0. public DataFactoryElement TrustedCertPath { get; set; } - /// Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + /// Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. Only used for Version 1.0. public DataFactoryElement UseSystemTrustStore { get; set; } - /// Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + /// Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. Only used for Version 1.0. public DataFactoryElement AllowHostNameCNMismatch { get; set; } - /// Specifies whether to allow self-signed certificates from the server. The default value is false. + /// Specifies whether to allow self-signed certificates from the server. The default value is false. Only used for Version 1.0. public DataFactoryElement AllowSelfSignedServerCert { get; set; } - /// The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. + /// The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value for Version 1.0 is the client system time zone. The default value for Version 2.0 is server system timeZone. 
public DataFactoryElement TimeZoneId { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. public string EncryptedCredential { get; set; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.Serialization.cs index 068fff5d91cf..e2884fcd9c1c 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.Serialization.cs @@ -178,7 +178,7 @@ internal static SapOdpLinkedService DeserializeSapOdpLinkedService(JsonElement e DataFactorySecret password = default; DataFactoryElement messageServer = default; DataFactoryElement messageServerService = default; - DataFactoryElement sncMode = default; + DataFactoryElement sncMode = default; DataFactoryElement sncMyName = default; DataFactoryElement sncPartnerName = default; DataFactoryElement sncLibraryPath = default; @@ -346,7 +346,7 @@ internal static SapOdpLinkedService DeserializeSapOdpLinkedService(JsonElement e { continue; } - sncMode = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + sncMode = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("sncMyName"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.cs index 987b4353bc46..e6d2ad47c88e 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapOdpLinkedService.cs @@ -37,7 +37,7 @@ public SapOdpLinkedService() /// Password to access the SAP server where the table is located. /// The hostname of the SAP Message Server. Type: string (or Expression with resultType string). /// The service name or port number of the Message Server. Type: string (or Expression with resultType string). - /// SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). + /// SNC activation flag (Boolean) to access the SAP server where the table is located. Type: boolean (or Expression with resultType boolean). /// Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). /// Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). /// External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). @@ -46,7 +46,7 @@ public SapOdpLinkedService() /// The Logon Group for the SAP System. Type: string (or Expression with resultType string). /// The subscriber name. Type: string (or Expression with resultType string). /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
- internal SapOdpLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement server, DataFactoryElement systemNumber, DataFactoryElement clientId, DataFactoryElement language, DataFactoryElement systemId, DataFactoryElement userName, DataFactorySecret password, DataFactoryElement messageServer, DataFactoryElement messageServerService, DataFactoryElement sncMode, DataFactoryElement sncMyName, DataFactoryElement sncPartnerName, DataFactoryElement sncLibraryPath, DataFactoryElement sncQop, DataFactoryElement x509CertificatePath, DataFactoryElement logonGroup, DataFactoryElement subscriberName, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) + internal SapOdpLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement server, DataFactoryElement systemNumber, DataFactoryElement clientId, DataFactoryElement language, DataFactoryElement systemId, DataFactoryElement userName, DataFactorySecret password, DataFactoryElement messageServer, DataFactoryElement messageServerService, DataFactoryElement sncMode, DataFactoryElement sncMyName, DataFactoryElement sncPartnerName, DataFactoryElement sncLibraryPath, DataFactoryElement sncQop, DataFactoryElement x509CertificatePath, DataFactoryElement logonGroup, DataFactoryElement subscriberName, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) { Server = server; SystemNumber = systemNumber; @@ -87,8 +87,8 @@ internal SapOdpLinkedService(string linkedServiceType, string linkedServiceVersi public DataFactoryElement MessageServer { get; set; } /// The service name or port number of the Message Server. Type: string (or Expression with resultType string). public DataFactoryElement MessageServerService { get; set; } - /// SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - public DataFactoryElement SncMode { get; set; } + /// SNC activation flag (Boolean) to access the SAP server where the table is located. Type: boolean (or Expression with resultType boolean). + public DataFactoryElement SncMode { get; set; } /// Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). public DataFactoryElement SncMyName { get; set; } /// Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). 
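For the Presto linked service changes earlier in this diff (serverVersion dropped from the constructor and made optional, plus the new enableServerCertificateValidation element), a minimal hedged construction sketch follows. The host, catalog, and authentication values are placeholders, not taken from the PR, and literals are assumed to convert implicitly to the corresponding DataFactoryElement as elsewhere in this library.

using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

// ServerVersion is no longer a required constructor argument after this change.
var presto = new PrestoLinkedService(
    host: "presto.contoso.com",
    catalog: "hive",
    authenticationType: PrestoAuthenticationType.Anonymous)
{
    EnableSsl = true,
    // New in this diff; per the doc comment it is only honored by version 2.0 of the connector.
    EnableServerCertificateValidation = true,
};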
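The SAP ODP change above retypes sncMode from a string "0"/"1" indicator to a boolean element. A small sketch of the resulting caller-side shape, reusing the namespaces from the previous sketch; the connection values are placeholders.

var sapOdp = new SapOdpLinkedService
{
    Server = "sapapp.contoso.com",
    SystemNumber = "00",
    ClientId = "100",
    UserName = "integration_user",
    SncMode = true,                  // previously a string indicator ("0"/"1"); now a boolean element
    SncMyName = "p:CN=INITIATOR",
    SncPartnerName = "p:CN=PARTNER",
    SncQop = "3",                    // still a string element per the unchanged doc comment
};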
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.Serialization.cs index 149905470bbe..3d88f36f0248 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.Serialization.cs @@ -168,7 +168,7 @@ internal static SapTableLinkedService DeserializeSapTableLinkedService(JsonEleme DataFactorySecret password = default; DataFactoryElement messageServer = default; DataFactoryElement messageServerService = default; - DataFactoryElement sncMode = default; + DataFactoryElement sncMode = default; DataFactoryElement sncMyName = default; DataFactoryElement sncPartnerName = default; DataFactoryElement sncLibraryPath = default; @@ -334,7 +334,7 @@ internal static SapTableLinkedService DeserializeSapTableLinkedService(JsonEleme { continue; } - sncMode = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + sncMode = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("sncMyName"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.cs index 53dfe9efa3c5..8462b0c5b5fc 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SapTableLinkedService.cs @@ -37,14 +37,14 @@ public SapTableLinkedService() /// Password to access the SAP server where the table is located. /// The hostname of the SAP Message Server. Type: string (or Expression with resultType string). /// The service name or port number of the Message Server. Type: string (or Expression with resultType string). - /// SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). + /// SNC activation flag (Boolean) to access the SAP server where the table is located. Type: boolean (or Expression with resultType boolean). /// Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). /// Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). /// External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). /// SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). /// The Logon Group for the SAP System. Type: string (or Expression with resultType string). /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
- internal SapTableLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement server, DataFactoryElement systemNumber, DataFactoryElement clientId, DataFactoryElement language, DataFactoryElement systemId, DataFactoryElement userName, DataFactorySecret password, DataFactoryElement messageServer, DataFactoryElement messageServerService, DataFactoryElement sncMode, DataFactoryElement sncMyName, DataFactoryElement sncPartnerName, DataFactoryElement sncLibraryPath, DataFactoryElement sncQop, DataFactoryElement logonGroup, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) + internal SapTableLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement server, DataFactoryElement systemNumber, DataFactoryElement clientId, DataFactoryElement language, DataFactoryElement systemId, DataFactoryElement userName, DataFactorySecret password, DataFactoryElement messageServer, DataFactoryElement messageServerService, DataFactoryElement sncMode, DataFactoryElement sncMyName, DataFactoryElement sncPartnerName, DataFactoryElement sncLibraryPath, DataFactoryElement sncQop, DataFactoryElement logonGroup, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) { Server = server; SystemNumber = systemNumber; @@ -83,8 +83,8 @@ internal SapTableLinkedService(string linkedServiceType, string linkedServiceVer public DataFactoryElement MessageServer { get; set; } /// The service name or port number of the Message Server. Type: string (or Expression with resultType string). public DataFactoryElement MessageServerService { get; set; } - /// SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). - public DataFactoryElement SncMode { get; set; } + /// SNC activation flag (Boolean) to access the SAP server where the table is located. Type: boolean (or Expression with resultType boolean). + public DataFactoryElement SncMode { get; set; } /// Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). public DataFactoryElement SncMyName { get; set; } /// Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). 
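SapTableLinkedService receives the identical sncMode retyping. Besides a literal boolean, the flag can stay dynamic; the sketch below assumes the DataFactoryElement<bool>.FromExpression factory from Azure.Core.Expressions.DataFactory for ADF expressions, and the parameter name inside the expression is illustrative.

var sapTable = new SapTableLinkedService
{
    Server = "sapapp.contoso.com",
    SncMode = true,   // literal boolean, as with SapOdpLinkedService above
};

// Or keep the flag parameterized with an ADF expression instead of a literal:
sapTable.SncMode = DataFactoryElement<bool>.FromExpression("@linkedService().enableSnc");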
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.Serialization.cs index 30d73be6a848..c12e87a4e56e 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.Serialization.cs @@ -43,6 +43,11 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("tableName"u8); JsonSerializer.Serialize(writer, TableName); } + if (Optional.IsDefined(ValueType)) + { + writer.WritePropertyName("valueType"u8); + writer.WriteStringValue(ValueType.Value.ToString()); + } writer.WriteEndObject(); foreach (var item in AdditionalProperties) { @@ -87,6 +92,7 @@ internal static ServiceNowV2ObjectDataset DeserializeServiceNowV2ObjectDataset(J IList annotations = default; DatasetFolder folder = default; DataFactoryElement tableName = default; + ValueType? valueType = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -186,6 +192,15 @@ internal static ServiceNowV2ObjectDataset DeserializeServiceNowV2ObjectDataset(J tableName = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } + if (property0.NameEquals("valueType"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + valueType = new ValueType(property0.Value.GetString()); + continue; + } } continue; } @@ -202,7 +217,8 @@ internal static ServiceNowV2ObjectDataset DeserializeServiceNowV2ObjectDataset(J annotations ?? new ChangeTrackingList(), folder, additionalProperties, - tableName); + tableName, + valueType); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.cs index 0df01057e346..a10527a5da26 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ServiceNowV2ObjectDataset.cs @@ -35,9 +35,11 @@ public ServiceNowV2ObjectDataset(DataFactoryLinkedServiceReference linkedService /// The folder that this Dataset is in. If not specified, Dataset will appear at the root level. /// Additional Properties. /// The table name. Type: string (or Expression with resultType string). - internal ServiceNowV2ObjectDataset(string datasetType, string description, DataFactoryElement> structure, DataFactoryElement> schema, DataFactoryLinkedServiceReference linkedServiceName, IDictionary parameters, IList annotations, DatasetFolder folder, IDictionary additionalProperties, DataFactoryElement tableName) : base(datasetType, description, structure, schema, linkedServiceName, parameters, annotations, folder, additionalProperties) + /// Type of value copied from source. 
+ internal ServiceNowV2ObjectDataset(string datasetType, string description, DataFactoryElement> structure, DataFactoryElement> schema, DataFactoryLinkedServiceReference linkedServiceName, IDictionary parameters, IList annotations, DatasetFolder folder, IDictionary additionalProperties, DataFactoryElement tableName, ValueType? valueType) : base(datasetType, description, structure, schema, linkedServiceName, parameters, annotations, folder, additionalProperties) { TableName = tableName; + ValueType = valueType; DatasetType = datasetType ?? "ServiceNowV2Object"; } @@ -48,5 +50,7 @@ internal ServiceNowV2ObjectDataset() /// The table name. Type: string (or Expression with resultType string). public DataFactoryElement TableName { get; set; } + /// Type of value copied from source. + public ValueType? ValueType { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataImportCommand.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataImportCommand.Serialization.cs new file mode 100644 index 000000000000..fbbf066ccdcc --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataImportCommand.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + public partial class TeradataImportCommand : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TeradataImportCommand)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(AdditionalFormatOptions)) + { + writer.WritePropertyName("additionalFormatOptions"u8); + JsonSerializer.Serialize(writer, AdditionalFormatOptions); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + TeradataImportCommand IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TeradataImportCommand)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTeradataImportCommand(document.RootElement, options); + } + + internal static TeradataImportCommand DeserializeTeradataImportCommand(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DataFactoryElement> additionalFormatOptions = default; + string type = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("additionalFormatOptions"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + additionalFormatOptions = JsonSerializer.Deserialize>>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new TeradataImportCommand(type, additionalProperties, additionalFormatOptions); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TeradataImportCommand)} does not support writing '{options.Format}' format."); + } + } + + TeradataImportCommand IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTeradataImportCommand(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TeradataImportCommand)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataImportCommand.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataImportCommand.cs new file mode 100644 index 000000000000..3ad9eead859f --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataImportCommand.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// Teradata import command settings. + public partial class TeradataImportCommand : ImportSettings + { + /// Initializes a new instance of . + public TeradataImportCommand() + { + ImportSettingsType = "TeradataImportCommand"; + } + + /// Initializes a new instance of . 
+ /// The import setting type. + /// Additional Properties. + /// Additional format options for Teradata Copy Command. The format options only applies to direct copy from CSV source. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "timeFormat": "HHhMImSSs" }. + internal TeradataImportCommand(string importSettingsType, IDictionary additionalProperties, DataFactoryElement> additionalFormatOptions) : base(importSettingsType, additionalProperties) + { + AdditionalFormatOptions = additionalFormatOptions; + ImportSettingsType = importSettingsType ?? "TeradataImportCommand"; + } + + /// Additional format options for Teradata Copy Command. The format options only applies to direct copy from CSV source. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "timeFormat": "HHhMImSSs" }. + public DataFactoryElement> AdditionalFormatOptions { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.Serialization.cs index 60693311cb11..1a4bf2f0a648 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.Serialization.cs @@ -63,6 +63,36 @@ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWri writer.WritePropertyName("password"u8); JsonSerializer.Serialize(writer, Password); } + if (Optional.IsDefined(SslMode)) + { + writer.WritePropertyName("sslMode"u8); + JsonSerializer.Serialize(writer, SslMode); + } + if (Optional.IsDefined(PortNumber)) + { + writer.WritePropertyName("portNumber"u8); + JsonSerializer.Serialize(writer, PortNumber); + } + if (Optional.IsDefined(HttpsPortNumber)) + { + writer.WritePropertyName("httpsPortNumber"u8); + JsonSerializer.Serialize(writer, HttpsPortNumber); + } + if (Optional.IsDefined(UseDataEncryption)) + { + writer.WritePropertyName("useDataEncryption"u8); + JsonSerializer.Serialize(writer, UseDataEncryption); + } + if (Optional.IsDefined(CharacterSet)) + { + writer.WritePropertyName("characterSet"u8); + JsonSerializer.Serialize(writer, CharacterSet); + } + if (Optional.IsDefined(MaxRespSize)) + { + writer.WritePropertyName("maxRespSize"u8); + JsonSerializer.Serialize(writer, MaxRespSize); + } if (Optional.IsDefined(EncryptedCredential)) { writer.WritePropertyName("encryptedCredential"u8); @@ -114,6 +144,12 @@ internal static TeradataLinkedService DeserializeTeradataLinkedService(JsonEleme TeradataAuthenticationType? 
authenticationType = default; DataFactoryElement username = default; DataFactorySecret password = default; + DataFactoryElement sslMode = default; + DataFactoryElement portNumber = default; + DataFactoryElement httpsPortNumber = default; + DataFactoryElement useDataEncryption = default; + DataFactoryElement characterSet = default; + DataFactoryElement maxRespSize = default; string encryptedCredential = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); @@ -232,6 +268,60 @@ internal static TeradataLinkedService DeserializeTeradataLinkedService(JsonEleme password = JsonSerializer.Deserialize(property0.Value.GetRawText()); continue; } + if (property0.NameEquals("sslMode"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sslMode = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("portNumber"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + portNumber = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("httpsPortNumber"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + httpsPortNumber = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("useDataEncryption"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + useDataEncryption = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("characterSet"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + characterSet = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("maxRespSize"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxRespSize = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } if (property0.NameEquals("encryptedCredential"u8)) { encryptedCredential = property0.Value.GetString(); @@ -256,6 +346,12 @@ internal static TeradataLinkedService DeserializeTeradataLinkedService(JsonEleme authenticationType, username, password, + sslMode, + portNumber, + httpsPortNumber, + useDataEncryption, + characterSet, + maxRespSize, encryptedCredential); } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.cs index 1c81a58b3201..53e60f1cb506 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataLinkedService.cs @@ -28,24 +28,36 @@ public TeradataLinkedService() /// Parameters for linked service. /// List of tags that can be used for describing the linked service. /// Additional Properties. - /// Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + /// Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Only applied for version 1.0. /// Server name for connection. Type: string (or Expression with resultType string). /// AuthenticationType to be used for connection. /// Username for authentication. Type: string (or Expression with resultType string). /// Password for authentication. + /// SSL mode for connection. 
Valid values including: “Disable”, “Allow”, “Prefer”, “Require”, “Verify-CA”, “Verify-Full”. Default value is “Verify-Full”. Type: string (or Expression with resultType string). Only applied for version 2.0. + /// The port numbers when connecting to server through non HTTPS/TLS connections. Type: integer (or Expression with resultType integer). Only used for V2. Only applied for version 2.0. + /// The port numbers when connecting to server through HTTPS/TLS connections. Type: integer (or Expression with resultType integer). Only applied for version 2.0. + /// Specifies whether to encrypt all communication with the Teradata database. Allowed values are 0 or 1. This setting will be ignored for HTTPS/TLS connections. Type: integer (or Expression with resultType integer). Only applied for version 2.0. + /// The character set to use for the connection. Type: string (or Expression with resultType string). Only applied for version 2.0. + /// The maximum size of the response buffer for SQL requests, in bytes. Type: integer. Only applied for version 2.0. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal TeradataLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryElement server, TeradataAuthenticationType? authenticationType, DataFactoryElement username, DataFactorySecret password, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) + internal TeradataLinkedService(string linkedServiceType, string linkedServiceVersion, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement connectionString, DataFactoryElement server, TeradataAuthenticationType? authenticationType, DataFactoryElement username, DataFactorySecret password, DataFactoryElement sslMode, DataFactoryElement portNumber, DataFactoryElement httpsPortNumber, DataFactoryElement useDataEncryption, DataFactoryElement characterSet, DataFactoryElement maxRespSize, string encryptedCredential) : base(linkedServiceType, linkedServiceVersion, connectVia, description, parameters, annotations, additionalProperties) { ConnectionString = connectionString; Server = server; AuthenticationType = authenticationType; Username = username; Password = password; + SslMode = sslMode; + PortNumber = portNumber; + HttpsPortNumber = httpsPortNumber; + UseDataEncryption = useDataEncryption; + CharacterSet = characterSet; + MaxRespSize = maxRespSize; EncryptedCredential = encryptedCredential; LinkedServiceType = linkedServiceType ?? "Teradata"; } - /// Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + /// Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Only applied for version 1.0. public DataFactoryElement ConnectionString { get; set; } /// Server name for connection. Type: string (or Expression with resultType string). public DataFactoryElement Server { get; set; } @@ -55,6 +67,18 @@ internal TeradataLinkedService(string linkedServiceType, string linkedServiceVer public DataFactoryElement Username { get; set; } /// Password for authentication. 
public DataFactorySecret Password { get; set; } + /// SSL mode for connection. Valid values including: “Disable”, “Allow”, “Prefer”, “Require”, “Verify-CA”, “Verify-Full”. Default value is “Verify-Full”. Type: string (or Expression with resultType string). Only applied for version 2.0. + public DataFactoryElement SslMode { get; set; } + /// The port numbers when connecting to server through non HTTPS/TLS connections. Type: integer (or Expression with resultType integer). Only used for V2. Only applied for version 2.0. + public DataFactoryElement PortNumber { get; set; } + /// The port numbers when connecting to server through HTTPS/TLS connections. Type: integer (or Expression with resultType integer). Only applied for version 2.0. + public DataFactoryElement HttpsPortNumber { get; set; } + /// Specifies whether to encrypt all communication with the Teradata database. Allowed values are 0 or 1. This setting will be ignored for HTTPS/TLS connections. Type: integer (or Expression with resultType integer). Only applied for version 2.0. + public DataFactoryElement UseDataEncryption { get; set; } + /// The character set to use for the connection. Type: string (or Expression with resultType string). Only applied for version 2.0. + public DataFactoryElement CharacterSet { get; set; } + /// The maximum size of the response buffer for SQL requests, in bytes. Type: integer. Only applied for version 2.0. + public DataFactoryElement MaxRespSize { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. public string EncryptedCredential { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataSink.Serialization.cs new file mode 100644 index 000000000000..6936b49faf7e --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataSink.Serialization.cs @@ -0,0 +1,204 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + public partial class TeradataSink : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TeradataSink)} does not support writing '{format}' format."); + } + + base.JsonModelWriteCore(writer, options); + if (Optional.IsDefined(ImportSettings)) + { + writer.WritePropertyName("importSettings"u8); + writer.WriteObjectValue(ImportSettings, options); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value, ModelSerializationExtensions.JsonDocumentOptions)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + TeradataSink IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TeradataSink)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTeradataSink(document.RootElement, options); + } + + internal static TeradataSink DeserializeTeradataSink(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TeradataImportCommand importSettings = default; + string type = default; + DataFactoryElement writeBatchSize = default; + DataFactoryElement writeBatchTimeout = default; + DataFactoryElement sinkRetryCount = default; + DataFactoryElement sinkRetryWait = default; + DataFactoryElement maxConcurrentConnections = default; + DataFactoryElement disableMetricsCollection = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("importSettings"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + importSettings = TeradataImportCommand.DeserializeTeradataImportCommand(property.Value, options); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (property.NameEquals("writeBatchSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + writeBatchSize = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("writeBatchTimeout"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + writeBatchTimeout = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("sinkRetryCount"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sinkRetryCount = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("sinkRetryWait"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sinkRetryWait = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("maxConcurrentConnections"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxConcurrentConnections = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("disableMetricsCollection"u8)) 
+ { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + disableMetricsCollection = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new TeradataSink( + type, + writeBatchSize, + writeBatchTimeout, + sinkRetryCount, + sinkRetryWait, + maxConcurrentConnections, + disableMetricsCollection, + additionalProperties, + importSettings); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TeradataSink)} does not support writing '{options.Format}' format."); + } + } + + TeradataSink IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeTeradataSink(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TeradataSink)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataSink.cs new file mode 100644 index 000000000000..fb0335d47069 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TeradataSink.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// A copy activity Teradata sink. + public partial class TeradataSink : CopySink + { + /// Initializes a new instance of . + public TeradataSink() + { + CopySinkType = "TeradataSink"; + } + + /// Initializes a new instance of . + /// Copy sink type. + /// Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. + /// Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// Sink retry count. Type: integer (or Expression with resultType integer). + /// Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). + /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). + /// Additional Properties. + /// Teradata import settings. 
+ internal TeradataSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, TeradataImportCommand importSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + { + ImportSettings = importSettings; + CopySinkType = copySinkType ?? "TeradataSink"; + } + + /// Teradata import settings. + public TeradataImportCommand ImportSettings { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ValueType.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ValueType.cs new file mode 100644 index 000000000000..cc28ca88cb20 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ValueType.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// Type of value copied from source. + public readonly partial struct ValueType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public ValueType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string ActualValue = "actual"; + private const string DisplayValue = "display"; + + /// actual. + public static ValueType Actual { get; } = new ValueType(ActualValue); + /// display. + public static ValueType Display { get; } = new ValueType(DisplayValue); + /// Determines if two values are the same. + public static bool operator ==(ValueType left, ValueType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(ValueType left, ValueType right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator ValueType(string value) => new ValueType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ValueType other && Equals(other); + /// + public bool Equals(ValueType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md index 1a884660e10a..c3470a7d8a13 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md @@ -8,7 +8,7 @@ azure-arm: true csharp: true library-name: DataFactory namespace: Azure.ResourceManager.DataFactory -require: https://github.com/Azure/azure-rest-api-specs/blob/1982dfc5db1a54ac3cf824449e08590cee74d9a5/specification/datafactory/resource-manager/readme.md +require: /mnt/vss/_work/1/s/azure-rest-api-specs/specification/datafactory/resource-manager/readme.md output-folder: $(this-folder)/Generated clear-output-folder: true sample-gen: diff --git a/sdk/resourcemanager/ci.mgmt.yml b/sdk/resourcemanager/ci.mgmt.yml index d8bf790b5988..f28c28a5d39c 100644 --- a/sdk/resourcemanager/ci.mgmt.yml +++ b/sdk/resourcemanager/ci.mgmt.yml @@ -79,7 +79,6 @@ trigger: - sdk/dnsresolver/Azure.ResourceManager.DnsResolver - sdk/dynatrace/Azure.ResourceManager.Dynatrace - sdk/edgeorder/Azure.ResourceManager.EdgeOrder - - sdk/iotoperations/Azure.ResourceManager.IotOperations - sdk/edgezones/Azure.ResourceManager.EdgeZones - sdk/elastic/Azure.ResourceManager.Elastic - sdk/elasticsan/Azure.ResourceManager.ElasticSan @@ -108,6 +107,7 @@ trigger: - sdk/iot/Azure.ResourceManager.IotFirmwareDefense - sdk/iotcentral/Azure.ResourceManager.IotCentral - sdk/iothub/Azure.ResourceManager.IotHub + - sdk/iotoperations/Azure.ResourceManager.IotOperations - sdk/keyvault/Azure.ResourceManager.KeyVault - sdk/kubernetesconfiguration/Azure.ResourceManager.KubernetesConfiguration - sdk/kusto/Azure.ResourceManager.Kusto @@ -285,7 +285,6 @@ pr: - sdk/dnsresolver/Azure.ResourceManager.DnsResolver - sdk/dynatrace/Azure.ResourceManager.Dynatrace - sdk/edgeorder/Azure.ResourceManager.EdgeOrder - - sdk/iotoperations/Azure.ResourceManager.IotOperations - sdk/edgezones/Azure.ResourceManager.EdgeZones - sdk/elastic/Azure.ResourceManager.Elastic - sdk/elasticsan/Azure.ResourceManager.ElasticSan @@ -314,6 +313,7 @@ pr: - sdk/iot/Azure.ResourceManager.IotFirmwareDefense - sdk/iotcentral/Azure.ResourceManager.IotCentral - sdk/iothub/Azure.ResourceManager.IotHub + - sdk/iotoperations/Azure.ResourceManager.IotOperations - sdk/keyvault/Azure.ResourceManager.KeyVault - sdk/kubernetesconfiguration/Azure.ResourceManager.KubernetesConfiguration - sdk/kusto/Azure.ResourceManager.Kusto diff --git a/sdk/storage/ci.yml b/sdk/storage/ci.yml index 2ca0501b60bd..4e749e6b85b1 100644 --- a/sdk/storage/ci.yml +++ b/sdk/storage/ci.yml @@ -12,6 +12,8 @@ trigger: - sdk/storage/Azure.Storage.DataMovement/ - sdk/storage/Azure.Storage.DataMovement.Blobs/ - sdk/storage/Azure.Storage.DataMovement.Files/ + exclude: + - sdk/storage/Azure.ResourceManager.Storage/ - sdk/storage/Azure.Storage.DataMovement.Blobs.Files.Shares/ exclude: - sdk/storage/Azure.ResourceManager.Storage/
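Usage notes for the additions above follow, as hedged sketches rather than shipped samples; all assume the usual Azure.ResourceManager.DataFactory.Models and Azure.Core.Expressions.DataFactory usings. First, the new ValueType property on ServiceNowV2ObjectDataset. The linked service reference constructor shown is assumed from the existing Azure.Core.Expressions.DataFactory surface and the reference name is a placeholder; note also that the generated ValueType type shares its name with System.ValueType, so callers with a using System; directive may need to qualify or alias it as below.

using AdfModels = Azure.ResourceManager.DataFactory.Models;

var serviceNowDataset = new AdfModels.ServiceNowV2ObjectDataset(
    new DataFactoryLinkedServiceReference(
        DataFactoryLinkedServiceReferenceType.LinkedServiceReference,
        "ServiceNowV2LinkedService"))          // reference name is illustrative
{
    TableName = "incident",
    // New in this diff: "display" copies display values, "actual" copies raw values.
    ValueType = AdfModels.ValueType.Display,
};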
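Next, the Teradata linked service version 2.0 settings. The values below are placeholders chosen to match the ranges stated in the new doc comments (the SSL mode list, the 0/1 data encryption flag, a byte-sized response buffer); literals are assumed to convert implicitly to the corresponding DataFactoryElement.

var teradata = new TeradataLinkedService
{
    Server = "teradata.contoso.com",
    Username = "etl_user",
    SslMode = "Verify-Full",      // Disable, Allow, Prefer, Require, Verify-CA, Verify-Full
    HttpsPortNumber = 443,
    PortNumber = 1025,
    UseDataEncryption = 1,        // 0 or 1; ignored for HTTPS/TLS connections
    CharacterSet = "UTF16",
    MaxRespSize = 1048576,        // maximum response buffer size for SQL requests, in bytes
};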
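Finally, the new Teradata copy sink and its import settings. A minimal sketch of the sink itself; the surrounding copy-activity wiring is omitted, and AdditionalFormatOptions is left unset because its element type is not expanded in this extract of the diff.

var teradataSink = new TeradataSink
{
    WriteBatchSize = 10000,            // inherited CopySink setting
    MaxConcurrentConnections = 4,
    // New settings type introduced in this diff; per its doc comment, AdditionalFormatOptions
    // can carry key/value format options (e.g. "timeFormat": "HHhMImSSs") for direct copy from CSV.
    ImportSettings = new TeradataImportCommand(),
};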