diff --git a/.vscode/cspell.json b/.vscode/cspell.json
index 9a864abbac2a..7c69ad6801ed 100644
--- a/.vscode/cspell.json
+++ b/.vscode/cspell.json
@@ -105,6 +105,7 @@
"sdk/cosmos/azure-cosmos-spark_3-4_2-12/**",
"sdk/cosmos/azure-cosmos-spark_3-5/**",
"sdk/cosmos/azure-cosmos-spark_3-5_2-12/**",
+ "sdk/cosmos/azure-cosmos-spark_3-5_2-13/**",
"sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/**",
"sdk/cosmos/fabric-cosmos-spark-auth_3/**",
"sdk/cosmos/azure-cosmos-encryption/**",
diff --git a/eng/.docsettings.yml b/eng/.docsettings.yml
index e8bdd397627d..e68c43e623e6 100644
--- a/eng/.docsettings.yml
+++ b/eng/.docsettings.yml
@@ -78,6 +78,7 @@ known_content_issues:
- ['sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md', '#3113']
- ['sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md', '#3113']
- ['sdk/cosmos/azure-cosmos-spark_3-5_2-12/README.md', '#3113']
+ - ['sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md', '#3113']
- ['sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/README.md', '#3113']
- ['sdk/cosmos/fabric-cosmos-spark-auth_3/README.md', '#3113']
- ['sdk/cosmos/azure-cosmos-spark_3_2-12/dev/README.md', '#3113']
diff --git a/eng/pipelines/templates/stages/cosmos-emulator-matrix.json b/eng/pipelines/templates/stages/cosmos-emulator-matrix.json
index b3cc2b8d46d0..d1a2da8a0c64 100644
--- a/eng/pipelines/templates/stages/cosmos-emulator-matrix.json
+++ b/eng/pipelines/templates/stages/cosmos-emulator-matrix.json
@@ -71,18 +71,25 @@
"JavaTestVersion": "1.11",
"AdditionalArgs": "-DACCOUNT_HOST=https://localhost:8081/ -Dhadoop.home.dir=D:/Hadoop -DCOSMOS.AZURE_COSMOS_DISABLE_NON_STREAMING_ORDER_BY=true"
},
- "Spark 3.5 Integration Tests targeting Cosmos Emulator - Java 8'": {
- "ProfileFlag": "-Dspark-e2e_3-5",
+ "Spark 3.5, Scala 2.12 Integration Tests targeting Cosmos Emulator - Java 8'": {
+ "ProfileFlag": "-Dspark-e2e_3-5_2-12",
"PROTOCOLS": "[\"Tcp\"]",
"DESIRED_CONSISTENCIES": "[\"Session\"]",
"JavaTestVersion": "1.8",
"AdditionalArgs": "-DACCOUNT_HOST=https://localhost:8081/ -Dhadoop.home.dir=D:/Hadoop -DCOSMOS.AZURE_COSMOS_DISABLE_NON_STREAMING_ORDER_BY=true"
},
- "Spark 3.5 Integration Tests targeting Cosmos Emulator - Java 11'": {
- "ProfileFlag": "-Dspark-e2e_3-5",
+ "Spark 3.5, Scala 2.12 Integration Tests targeting Cosmos Emulator - Java 17'": {
+ "ProfileFlag": "-Dspark-e2e_3-5_2-12",
"PROTOCOLS": "[\"Tcp\"]",
"DESIRED_CONSISTENCIES": "[\"Session\"]",
- "JavaTestVersion": "1.11",
+ "JavaTestVersion": "1.17",
+ "AdditionalArgs": "-DACCOUNT_HOST=https://localhost:8081/ -Dhadoop.home.dir=D:/Hadoop -DCOSMOS.AZURE_COSMOS_DISABLE_NON_STREAMING_ORDER_BY=true"
+ },
+ "Spark 3.5, Scala 2.13 Integration Tests targeting Cosmos Emulator - Java 17'": {
+ "ProfileFlag": "-Dspark-e2e_3-5_2-13",
+ "PROTOCOLS": "[\"Tcp\"]",
+ "DESIRED_CONSISTENCIES": "[\"Session\"]",
+ "JavaTestVersion": "1.17",
"AdditionalArgs": "-DACCOUNT_HOST=https://localhost:8081/ -Dhadoop.home.dir=D:/Hadoop -DCOSMOS.AZURE_COSMOS_DISABLE_NON_STREAMING_ORDER_BY=true"
},
"Kafka Integration Tests targeting Cosmos Emulator - Java 11": {
diff --git a/eng/versioning/external_dependencies.txt b/eng/versioning/external_dependencies.txt
index 35ba63950967..4d55c3d58d0d 100644
--- a/eng/versioning/external_dependencies.txt
+++ b/eng/versioning/external_dependencies.txt
@@ -256,8 +256,11 @@ cosmos-spark_3-3_org.apache.spark:spark-hive_2.12;3.3.0
cosmos-spark_3-4_org.apache.spark:spark-hive_2.12;3.4.0
cosmos-spark_3-5_org.apache.spark:spark-hive_2.12;3.5.0
cosmos_org.scala-lang:scala-library;2.12.19
-cosmos_org.scala-lang.modules:scala-java8-compat_2.12;0.8.0
+cosmos-scala213_org.scala-lang:scala-library;2.13.17
+cosmos_org.scala-lang.modules:scala-java8-compat_2.12;0.9.1
+cosmos-scala213_org.scala-lang.modules:scala-java8-compat_2.13;0.9.1
cosmos_io.projectreactor:reactor-scala-extensions_2.12;0.8.0
+cosmos-scala213_io.projectreactor:reactor-scala-extensions_2.13;0.8.0
cosmos_commons-io:commons-io;2.4
cosmos_com.microsoft.azure:applicationinsights-core;2.6.4
cosmos_io.micrometer:micrometer-core;1.15.1
@@ -266,9 +269,13 @@ cosmos_io.micrometer:micrometer-registry-graphite;1.15.1
# Cosmos Spark connector tests only
cosmos_org.scalatest:scalatest_2.12;3.2.2
+cosmos-scala213_org.scalatest:scalatest_2.13;3.2.2
cosmos_org.scalatest:scalatest-flatspec_2.12;3.2.3
+cosmos-scala213_org.scalatest:scalatest-flatspec_2.13;3.2.3
cosmos_org.scalactic:scalactic_2.12;3.2.3
+cosmos-scala213_org.scalactic:scalactic_2.13;3.2.3
cosmos_org.scalamock:scalamock_2.12;5.0.0
+cosmos-scala213_org.scalamock:scalamock_2.13;5.0.0
cosmos_com.globalmentor:hadoop-bare-naked-local-fs;0.1.0
cosmos_org.mockito:mockito-core;4.8.1
diff --git a/eng/versioning/pom_file_version_scanner.ps1 b/eng/versioning/pom_file_version_scanner.ps1
index 38546e5e02c9..654188d7deb4 100644
--- a/eng/versioning/pom_file_version_scanner.ps1
+++ b/eng/versioning/pom_file_version_scanner.ps1
@@ -723,31 +723,46 @@ Get-ChildItem -Path $Path -Filter pom*.xml -Recurse -File | ForEach-Object {
$potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: dependency is missing version element for groupId=$($groupId), artifactId=$($artifactId) should be "
continue
}
- if ($versionNode.NextSibling -and $versionNode.NextSibling.NodeType -eq "Comment")
+
+ if ($versionNode.FirstChild.Value.StartsWith('${'))
{
- # unfortunately because there are POM exceptions we need to wildcard the group which may be
- # something like _groupId
- if ($versionNode.NextSibling.Value.Trim() -notmatch "{x-version-update;(.+)?$($groupId):$($artifactId);\w+}")
+ # skip version checks when they have been intentionally applied via variables
+ }
+ else
+ {
+ if ($versionNode.NextSibling -and $versionNode.NextSibling.NodeType -eq "Comment")
{
- $hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: dependency version update tag for groupId=$($groupId), artifactId=$($artifactId) should be "
+ # unfortunately because there are POM exceptions we need to wildcard the group which may be
+ # something like _groupId
+ if ($versionNode.NextSibling.Value.Trim() -notmatch "{x-version-update;(.+)?$($groupId):$($artifactId);\w+}")
+ {
+ $hasError = $true
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: dependency version update tag for groupId=$($groupId), artifactId=$($artifactId) should be "
+ }
+ else
+ {
+ # verify the version tag and version are correct
+ $retVal = Test-Dependency-Tag-And-Version $libHash $extDepHash $versionNode.InnerText.Trim() $versionNode.NextSibling.Value $artifactsPerSDHashSet
+ if ($retVal)
+ {
+ $hasError = $true
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage $retVal
+ }
+ }
}
else
{
- # verify the version tag and version are correct
- $retVal = Test-Dependency-Tag-And-Version $libHash $extDepHash $versionNode.InnerText.Trim() $versionNode.NextSibling.Value $artifactsPerSDHashSet
- if ($retVal)
- {
- $hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage $retVal
- }
+ $hasError = $true
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: Missing dependency version update tag for groupId=$($groupId), artifactId=$($artifactId). The tag should be "
}
- }
- else
- {
- $hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: Missing dependency version update tag for groupId=$($groupId), artifactId=$($artifactId). The tag should be "
- }
+ }
}
# Verify every plugin has a group, artifact and version
# Verify every dependency has a group, artifact and version
@@ -882,80 +897,87 @@ Get-ChildItem -Path $Path -Filter pom*.xml -Recurse -File | ForEach-Object {
$groupId = $split[0]
$artifactId = $split[1]
$version = $split[2]
- # The groupId match has to be able to deal with _ for external dependency exceptions
- if (!$includeNode.NextSibling -or $includeNode.NextSibling.NodeType -ne "Comment")
- {
- $hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: is missing the update tag which should be "
- }
- elseif ($includeNode.NextSibling.Value.Trim() -notmatch "{x-include-update;(.+)?$($groupId):$($artifactId);(current|dependency|external_dependency)}")
- {
- $hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: version update tag for $($includeNode.InnerText) should be "
+
+ if ($version.StartsWith('[${')) {
+ # skip version checks when they have been intentionally applied via variables
}
else
{
- # verify that the version is formatted correctly
- if (!$version.StartsWith("[") -or !$version.EndsWith("]"))
+ # The groupId match has to be able to deal with _ for external dependency exceptions
+ if (!$includeNode.NextSibling -or $includeNode.NextSibling.NodeType -ne "Comment")
{
$hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: the version entry '$($version)' for '$($rawIncludeText)' is not formatted correctly. The include version needs to of the form '[]', the braces lock the include to a specific version for these entries. -->"
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: is missing the update tag which should be "
+ }
+ elseif ($includeNode.NextSibling.Value.Trim() -notmatch "{x-include-update;(.+)?$($groupId):$($artifactId);(current|dependency|external_dependency)}")
+ {
+ $hasError = $true
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: version update tag for $($includeNode.InnerText) should be "
}
- # verify the version has the correct value
else
{
- $versionWithoutBraces = $version.Substring(1, $version.Length -2)
- # the key into the dependency has needs to be created from the tag's group/artifact
- # entries in case it's an external dependency entry. Because this has already
- # been validated for format, grab the group:artifact
- $depKey = $includeNode.NextSibling.Value.Trim().Split(";")[1]
- $depType = $includeNode.NextSibling.Value.Trim().Split(";")[2]
- $depType = $depType.Substring(0, $depType.IndexOf("}"))
- if ($depType -eq $DependencyTypeExternal)
+ # verify that the version is formatted correctly
+ if (!$version.StartsWith("[") -or !$version.EndsWith("]"))
{
- if ($extDepHash.ContainsKey($depKey))
- {
- if ($versionWithoutBraces -ne $extDepHash[$depKey].ver)
- {
- $hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: $($depKey)'s version is '$($versionWithoutBraces)' but the external_dependency version is listed as $($extDepHash[$depKey].ver)"
- }
- }
- else
- {
- $hasError = $true
- $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: the groupId:artifactId entry '$($depKey)' for '$($rawIncludeText)' is not a valid external dependency. Please verify the entry exists in the external_dependencies.txt file. -->"
- }
+ $hasError = $true
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: the version entry '$($version)' for '$($rawIncludeText)' is not formatted correctly. The include version needs to of the form '[]', the braces lock the include to a specific version for these entries. -->"
}
+ # verify the version has the correct value
else
{
- # If the tag isn't external_dependency then verify it exists in the library hash
- if (!$libHash.ContainsKey($depKey))
- {
- $hasError = $true
- return "Error: $($depKey)'s dependency type is '$($depType)' but the dependency does not exist in any of the version_*.txt files. Should this be an external_dependency? Please ensure the dependency type is correct or the dependency is added to the appropriate file."
-
- }
- if ($depType -eq $DependencyTypeDependency)
+ $versionWithoutBraces = $version.Substring(1, $version.Length -2)
+ # the key into the dependency hash needs to be created from the tag's group/artifact
+ # entries in case it's an external dependency entry. Because this has already
+ # been validated for format, grab the group:artifact
+ $depKey = $includeNode.NextSibling.Value.Trim().Split(";")[1]
+ $depType = $includeNode.NextSibling.Value.Trim().Split(";")[2]
+ $depType = $depType.Substring(0, $depType.IndexOf("}"))
+ if ($depType -eq $DependencyTypeExternal)
{
- if ($versionWithoutBraces -ne $libHash[$depKey].depVer)
+ if ($extDepHash.ContainsKey($depKey))
+ {
+ if ($versionWithoutBraces -ne $extDepHash[$depKey].ver)
+ {
+ $hasError = $true
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: $($depKey)'s version is '$($versionWithoutBraces)' but the external_dependency version is listed as $($extDepHash[$depKey].ver)"
+ }
+ }
+ else
{
$hasError = $true
- return "Error: $($depKey)'s is '$($versionString)' but the dependency version is listed as $($libHash[$depKey].depVer)"
+ $potentialLogMessage = Join-With-NewLine $potentialLogMessage "Error: the groupId:artifactId entry '$($depKey)' for '$($rawIncludeText)' is not a valid external dependency. Please verify the entry exists in the external_dependencies.txt file. -->"
}
}
- elseif ($depType -eq $DependencyTypeCurrent)
+ else
{
- # Verify that none of the 'current' dependencies are using a groupId that starts with 'unreleased_' or 'beta_'
- if ($depKey.StartsWith('unreleased_') -or $depKey.StartsWith('beta_'))
+ # If the tag isn't external_dependency then verify it exists in the library hash
+ if (!$libHash.ContainsKey($depKey))
{
$hasError = $true
- return "Error: $($versionUpdateString) is using an unreleased_ or beta_ dependency and trying to set current value. Only dependency versions can be set with an unreleased or beta dependency."
+ return "Error: $($depKey)'s dependency type is '$($depType)' but the dependency does not exist in any of the version_*.txt files. Should this be an external_dependency? Please ensure the dependency type is correct or the dependency is added to the appropriate file."
+
}
- if ($versionWithoutBraces -ne $libHash[$depKey].curVer)
+ if ($depType -eq $DependencyTypeDependency)
{
- $hasError = $true
- return "Error: $($depKey)'s is '$($versionString)' but the current version is listed as $($libHash[$depKey].curVer)"
+ if ($versionWithoutBraces -ne $libHash[$depKey].depVer)
+ {
+ $hasError = $true
+ return "Error: $($depKey)'s is '$($versionString)' but the dependency version is listed as $($libHash[$depKey].depVer)"
+ }
+ }
+ elseif ($depType -eq $DependencyTypeCurrent)
+ {
+ # Verify that none of the 'current' dependencies are using a groupId that starts with 'unreleased_' or 'beta_'
+ if ($depKey.StartsWith('unreleased_') -or $depKey.StartsWith('beta_'))
+ {
+ $hasError = $true
+ return "Error: $($versionUpdateString) is using an unreleased_ or beta_ dependency and trying to set current value. Only dependency versions can be set with an unreleased or beta dependency."
+ }
+ if ($versionWithoutBraces -ne $libHash[$depKey].curVer)
+ {
+ $hasError = $true
+ return "Error: $($depKey)'s is '$($versionString)' but the current version is listed as $($libHash[$depKey].curVer)"
+ }
}
}
}
diff --git a/eng/versioning/version_client.txt b/eng/versioning/version_client.txt
index 841936f5263a..fb1805f8efe0 100644
--- a/eng/versioning/version_client.txt
+++ b/eng/versioning/version_client.txt
@@ -115,6 +115,7 @@ com.azure:azure-cosmos-test;1.0.0-beta.16;1.0.0-beta.17
com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12;4.42.0;4.43.0-beta.1
com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12;4.42.0;4.43.0-beta.1
com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12;4.42.0;4.43.0-beta.1
+com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-13;4.42.0;4.43.0-beta.1
com.azure.cosmos.spark:fabric-cosmos-spark-auth_3;1.1.0;1.2.0-beta.1
com.azure:azure-cosmos-tests;1.0.0-beta.1;1.0.0-beta.1
com.azure:azure-data-appconfiguration;1.8.5;1.9.0-beta.1
diff --git a/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml b/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml
index 5f8aa27d8f28..c594cbfb2f1f 100644
--- a/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark-account-data-resolver-sample/pom.xml
@@ -103,7 +103,7 @@
org.scala-lang.modules
scala-java8-compat_2.12
- 0.8.0
+ 0.9.1
io.projectreactor
@@ -281,7 +281,7 @@
org.apache.spark:spark-sql_2.12:[3.5.0]
commons-io:commons-io:[2.4]
org.scala-lang:scala-library:[2.12.19]
- org.scala-lang.modules:scala-java8-compat_2.12:[0.8.0]
+ org.scala-lang.modules:scala-java8-compat_2.12:[0.9.1]
io.projectreactor:reactor-scala-extensions_2.12:[0.8.0]
org.scalatest:scalatest_2.12:[3.2.2]
org.apache.maven.plugins:maven-antrun-plugin:[3.1.0]
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-5/pom.xml
index 348d8c15caec..85337b09b220 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-5/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3-5/pom.xml
@@ -24,6 +24,10 @@
${cosmos.spark.skip}
${cosmos.spark.skip}
3.5
+ 2.12
+ 3.5.0
+ 3.5.0
+ 2.18.4
@@ -93,8 +97,8 @@
org.apache.spark
- spark-sql_2.12
- 3.5.0
+ spark-sql_${scala.binary.version}
+ ${spark35.version}
io.netty
@@ -109,8 +113,8 @@
org.apache.spark
- spark-hive_2.12
- 3.5.0
+ spark-hive_${scala.binary.version}
+ ${spark-hive-version}
io.netty
@@ -130,8 +134,8 @@
com.fasterxml.jackson.module
- jackson-module-scala_2.12
- 2.18.4
+ jackson-module-scala_${scala.binary.version}
+ ${scala-jackson.version}
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml
index 22434193aeae..5bbe86d16d50 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml
@@ -40,6 +40,18 @@
false
+ 2.12
+ 2.12.19
+ 3.3.0
+ 3.4.0
+ 3.5.0
+ 0.9.1
+ 0.8.0
+ 3.2.2
+ 3.2.3
+ 3.2.3
+ 5.0.0
+ 2.18.4
@@ -102,13 +114,13 @@
- spark-e2e_3-5
+ spark-e2e_3-5_2-12
${basedir}/scalastyle_config.xml
- spark-e2e_3-5
+ spark-e2e_3-5_2-12
@@ -148,6 +160,7 @@
scalatest-maven-plugin
2.1.0
+ ${scalatest.argLine}
${project.build.directory}/surefire-reports
.
SparkTestSuite.txt
@@ -165,5 +178,14 @@
+
+ java9-plus
+
+ [9,)
+
+
+ --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false
+
+
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md
new file mode 100644
index 000000000000..6ae4d7ce01d5
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CHANGELOG.md
@@ -0,0 +1,13 @@
+## Release History
+
+### 4.43.0-beta.1 (Unreleased)
+
+#### Features Added
+
+#### Breaking Changes
+
+#### Bugs Fixed
+
+#### Other Changes
+
+### NOTE: See the CHANGELOG.md files in the 3.3, 3.4 and 3.5 Scala 2.12 projects for changes prior to 4.43.0
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CONTRIBUTING.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CONTRIBUTING.md
new file mode 100644
index 000000000000..6949e20fb69d
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/CONTRIBUTING.md
@@ -0,0 +1,84 @@
+# Contributing
+These instructions are guidelines for building and contributing code.
+
+## Prerequisites
+- JDK 8 and above
+- [Maven](https://maven.apache.org/) 3.0 and above
+
+## Build from source
+To build the project, run the following Maven commands.
+
+```bash
+git clone https://github.com/Azure/azure-sdk-for-java.git
+cd azure-sdk-for-java/sdk/cosmos/azure-cosmos-spark_3-5_2-13
+mvn clean install
+```
+
+## Test
+There are integration tests that run against Azure Cosmos DB and against the
+[Azure Cosmos DB Emulator](https://docs.microsoft.com/azure/cosmos-db/local-emulator); for the
+emulator tests you need to follow the link to set up the emulator before test execution.
+
+- Run unit tests
+```bash
+mvn clean install -Dgpg.skip
+```
+
+- Run integration tests
+ - on Azure
+ > **NOTE** Integration tests against Azure require the Azure Cosmos DB Document
+ API and will automatically create a Cosmos database in your Azure subscription, so there
+ will be an **Azure usage fee.**
+
+ Integration tests require an Azure subscription. If you don't already have an Azure
+ subscription, you can activate your
+ [MSDN subscriber benefits](https://azure.microsoft.com/pricing/member-offers/msdn-benefits-details/)
+ or sign up for a [free Azure account](https://azure.microsoft.com/free/).
+
+ 1. Create an Azure Cosmos DB on Azure.
+ - Go to [Azure portal](https://portal.azure.com/) and click +New.
+ - Click Databases, and then click Azure Cosmos DB to create your database.
+ - Navigate to the database you have created, and click Access keys and copy your
+ URI and access keys for your database.
+
+ 2. Set the environment variables ACCOUNT_HOST, ACCOUNT_KEY and SECONDARY_ACCOUNT_KEY; their
+ values are the Cosmos account URI, primary key and secondary key respectively.
+
+ Also set the
+ second group of environment variables NEW_ACCOUNT_HOST, NEW_ACCOUNT_KEY and
+ NEW_SECONDARY_ACCOUNT_KEY; the two groups of environment variables can be the same.
+ 3. Run maven command with `integration-test-azure` profile.
+
+ ```bash
+ set ACCOUNT_HOST=your-cosmos-account-uri
+ set ACCOUNT_KEY=your-cosmos-account-primary-key
+ set SECONDARY_ACCOUNT_KEY=your-cosmos-account-secondary-key
+
+ set NEW_ACCOUNT_HOST=your-cosmos-account-uri
+ set NEW_ACCOUNT_KEY=your-cosmos-account-primary-key
+ set NEW_SECONDARY_ACCOUNT_KEY=your-cosmos-account-secondary-key
+ mvnw -P integration-test-azure clean install
+ ```
+
+ - on Emulator
+
+ Set up the Azure Cosmos DB Emulator by following
+ [these instructions](https://docs.microsoft.com/azure/cosmos-db/local-emulator), and set the
+ associated environment variables. Then run the tests with:
+ ```bash
+ mvnw -P integration-test-emulator install
+ ```
+
+
+- Skip test execution
+```bash
+mvn clean install -Dgpg.skip -DskipTests
+```
+
+## Version management
+Development versions follow the naming convention `0.1.2-beta.1`; release versions follow `0.1.2`.
+
+## Contribute to code
+Contributions are welcome. Please follow
+[these instructions](https://github.com/Azure/azure-sdk-for-java/blob/main/CONTRIBUTING.md) to contribute code.
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md
new file mode 100644
index 000000000000..250fba8bb05e
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/README.md
@@ -0,0 +1,192 @@
+# Azure Cosmos DB OLTP Spark 3 connector
+
+## Azure Cosmos DB OLTP Spark 3 connector for Spark 3.5
+**Azure Cosmos DB OLTP Spark connector** provides Apache Spark support for Azure Cosmos DB using
+the [SQL API][sql_api_query].
+[Azure Cosmos DB][cosmos_introduction] is a globally distributed database service that allows
+developers to work with data using a variety of standard APIs, such as SQL, MongoDB, Cassandra, Graph, and Table.
+
+If you have any feedback or ideas on how to improve your experience, please let us know here:
+https://github.com/Azure/azure-sdk-for-java/issues/new
+
+### Documentation
+
+- [Getting started](https://aka.ms/azure-cosmos-spark-3-quickstart)
+- [Catalog API](https://aka.ms/azure-cosmos-spark-3-catalog-api)
+- [Configuration Parameter Reference](https://aka.ms/azure-cosmos-spark-3-config)
+
+[//]: # (//TODO: add more sections)
+[//]: # (//TODO: Enable Client Logging)
+[//]: # (//TODO: Examples)
+[//]: # (//TODO: Next steps)
+[//]: # (//TODO: Key concepts)
+[//]: # (//TODO: Azure Cosmos DB Partition)
+[//]: # (//TODO: Troubleshooting)
+
+### Version Compatibility
+
+#### azure-cosmos-spark_3-5_2-12
+| Connector | Supported Spark Versions | Minimum Java Version | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes |
+|-----------|--------------------------|-----------------------|---------------------------|-------------------------------|---------------------------|
+| 4.41.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\*, 16.4 LTS | 1.3.\* |
+| 4.40.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.39.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.38.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.37.2 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.37.1 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.37.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.36.1 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.36.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.35.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.34.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.33.1 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.33.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.32.1 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.32.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.31.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.30.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+| 4.29.0 | 3.5.0 | [8, 11] | 2.12 | 14.\*, 15.\* | |
+
+Note: Support for Java 8 versions prior to 8u371 is deprecated as of Spark 3.5.0. When using the Scala API, applications must
+use the same version of Scala that Spark was compiled for.
+
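+For reference, here is a minimal SBT sketch (a hypothetical `build.sbt` fragment; the connector version
+shown is this repository's current beta and may differ once released) that pins Scala to the 2.13 line
+matching a Scala 2.13 Spark build:
+
+```scala
+// build.sbt - the Scala version must match the line Spark was compiled for
+scalaVersion := "2.13.17"
+
+libraryDependencies ++= Seq(
+  // Spark itself is provided by the cluster runtime
+  "org.apache.spark" %% "spark-sql" % "3.5.0" % "provided",
+  // the artifact id already encodes the Spark and Scala lines, so use % instead of %%
+  "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-13" % "4.43.0-beta.1"
+)
+```
+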
+#### azure-cosmos-spark_3-4_2-12
+| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes | Supported Fabric Runtimes |
+|-----------|--------------------------|------------------------|--------------------------|-------------------------------|---------------------------|
+| 4.41.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.40.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.39.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.38.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.37.2 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.37.1 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.37.0 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.36.1 | 3.4.0 - 3.4.1 | [8, 11] | 2.12 | 13.\* | |
+| 4.36.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.35.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.34.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.33.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.33.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.32.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.32.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.31.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.30.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.29.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.28.4 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.28.3 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.28.2 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.28.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.28.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.27.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.27.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.26.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.26.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.25.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.25.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.24.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.24.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.23.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.22.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.21.1 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+| 4.21.0 | 3.4.0 | [8, 11] | 2.12 | 13.* | |
+
+#### azure-cosmos-spark_3-3_2-12
+| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
+|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.41.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.40.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.39.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.38.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.37.2 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.37.1 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.37.0 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.36.1 | 3.3.0 - 3.3.2 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.36.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.35.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.34.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.33.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.33.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.32.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.32.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.31.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.30.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.29.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.28.4 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.28.3 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.28.2 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.28.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.28.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.27.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.27.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.26.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.26.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.25.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.25.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.24.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.24.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.23.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.22.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.21.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.21.0 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
+| 4.20.0 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.19.0 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.18.2 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.18.1 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.18.0 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.17.2 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.17.0 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.16.0 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+| 4.15.0 | 3.3.0 | [8, 11] | 2.12 | 11.\* |
+
+### Download
+
+You can use the Maven coordinates of the jar to automatically install the Spark connector into your Databricks Runtime from Maven:
+`com.azure.cosmos.spark:azure-cosmos-spark_3-5_2-12:4.41.0`
+
+You can also take a dependency on the Cosmos DB Spark connector in your SBT project:
+```scala
+libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-5_2-12" % "4.41.0"
+```
+
+Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark).
+
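+As a quick smoke test, here is a minimal sketch of reading a Cosmos container as a DataFrame through
+the connector (the endpoint, key, database, and container values below are placeholders you must
+substitute with your own):
+
+```scala
+import org.apache.spark.sql.SparkSession
+
+val spark = SparkSession.builder().appName("cosmos-read-sample").getOrCreate()
+
+// placeholder connection settings - replace with your own account values
+val cosmosCfg = Map(
+  "spark.cosmos.accountEndpoint" -> "https://<your-account>.documents.azure.com:443/",
+  "spark.cosmos.accountKey" -> "<your-account-key>",
+  "spark.cosmos.database" -> "<your-database>",
+  "spark.cosmos.container" -> "<your-container>"
+)
+
+// the connector registers the "cosmos.oltp" data source
+val df = spark.read.format("cosmos.oltp").options(cosmosCfg).load()
+df.show(10)
+```
+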
+#### General
+
+If you encounter any bug, please file an issue [here](https://github.com/Azure/azure-sdk-for-java/issues/new).
+
+To suggest a new feature or changes that could be made, file an issue the same way you would for a bug.
+
+### License
+This project is under the MIT license, and it uses and repackages other third-party libraries as an uber jar.
+See [NOTICE.txt](https://github.com/Azure/azure-sdk-for-java/blob/main/NOTICE.txt).
+
+### Contributing
+
+This project welcomes contributions and suggestions. Most contributions require you to agree to a
+[Contributor License Agreement (CLA)][cla] declaring that you have the right to, and actually do, grant us the rights
+to use your contribution.
+
+When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate
+the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to
+do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct][coc]. For more information see the [Code of Conduct FAQ][coc_faq]
+or contact [opencode@microsoft.com][coc_contact] with any additional questions or comments.
+
+
+[source_code]: src
+[cosmos_introduction]: https://learn.microsoft.com/azure/cosmos-db/
+[cosmos_docs]: https://learn.microsoft.com/azure/cosmos-db/introduction
+[jdk]: https://learn.microsoft.com/java/azure/jdk/?view=azure-java-stable
+[maven]: https://maven.apache.org/
+[cla]: https://cla.microsoft.com
+[coc]: https://opensource.microsoft.com/codeofconduct/
+[coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/
+[coc_contact]: mailto:opencode@microsoft.com
+[azure_subscription]: https://azure.microsoft.com/free/
+[samples]: https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/spring/azure-spring-data-cosmos/src/samples/java/com/azure/spring/data/cosmos
+[sql_api_query]: https://learn.microsoft.com/azure/cosmos-db/sql-api-sql-query
+[local_emulator]: https://learn.microsoft.com/azure/cosmos-db/local-emulator
+[local_emulator_export_ssl_certificates]: https://learn.microsoft.com/azure/cosmos-db/local-emulator-export-ssl-certificates
+[azure_cosmos_db_partition]: https://learn.microsoft.com/azure/cosmos-db/partition-data
+[sql_queries_in_cosmos]: https://learn.microsoft.com/azure/cosmos-db/tutorial-query-sql-api
+[sql_queries_getting_started]: https://learn.microsoft.com/azure/cosmos-db/sql-query-getting-started
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml
new file mode 100644
index 000000000000..c91292394edc
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml
@@ -0,0 +1,171 @@
+
+
+ 4.0.0
+
+ com.azure.cosmos.spark
+ azure-cosmos-spark_3-5
+ 0.0.1-beta.1
+ ../azure-cosmos-spark_3-5
+
+ com.azure.cosmos.spark
+ azure-cosmos-spark_3-5_2-13
+ 4.43.0-beta.1
+ jar
+ https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-5_2-13
+ OLTP Spark 3.5 Connector for Azure Cosmos DB SQL API
+ OLTP Spark 3.5 Connector for Azure Cosmos DB SQL API
+
+ scm:git:https://github.com/Azure/azure-sdk-for-java.git/sdk/cosmos/azure-cosmos-spark_3-5_2-13
+
+ https://github.com/Azure/azure-sdk-for-java/sdk/cosmos/azure-cosmos-spark_3-5_2-13
+
+
+ Microsoft Corporation
+ http://microsoft.com
+
+
+
+ The MIT License (MIT)
+ http://opensource.org/licenses/MIT
+ repo
+
+
+
+
+ microsoft
+ Microsoft Corporation
+
+
+
+ false
+ 2.13
+ 2.13.17
+ 0.9.1
+ 0.8.0
+ 3.2.2
+ 3.2.3
+ 3.2.3
+ 5.0.0
+
+
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ 3.6.1
+
+
+ add-sources
+ generate-sources
+
+ add-source
+
+
+
+ ${basedir}/../azure-cosmos-spark_3/src/main/scala
+ ${basedir}/../azure-cosmos-spark_3-5/src/main/scala
+ ${basedir}/src/main/scala
+
+
+
+
+ add-test-sources
+ generate-test-sources
+
+ add-test-source
+
+
+
+ ${basedir}/../azure-cosmos-spark_3/src/test/scala
+ ${basedir}/../azure-cosmos-spark_3-5/src/test/scala
+ ${basedir}/src/test/scala
+
+
+
+
+ add-resources
+ generate-resources
+
+ add-resource
+
+
+
+ ${basedir}/../azure-cosmos-spark_3/src/main/resources
+ ${basedir}/../azure-cosmos-spark_3-5/src/main/resources
+ ${basedir}/src/main/resources
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-enforcer-plugin
+ 3.6.1
+
+
+
+
+
+
+ spark-e2e_3-5_2-13
+
+
+ ${basedir}/scalastyle_config.xml
+
+
+ spark-e2e_3-5_2-13
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 3.5.3
+
+
+ **/*.*
+ **/*Test.*
+ **/*Suite.*
+ **/*Spec.*
+
+ true
+
+
+
+ org.scalatest
+ scalatest-maven-plugin
+ 2.1.0
+
+ ${scalatest.argLine}
+ ${project.build.directory}/surefire-reports
+ .
+ SparkTestSuite.txt
+ (ITest|Test|Spec|Suite)
+
+
+
+ test
+
+ test
+
+
+
+
+
+
+
+
+ java9-plus
+
+ [9,)
+
+
+ --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false
+
+
+
+
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/scalastyle_config.xml b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/scalastyle_config.xml
new file mode 100644
index 000000000000..7a8ad2823fb8
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/scalastyle_config.xml
@@ -0,0 +1,130 @@
+
+ Scalastyle standard configuration
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/main/resources/azure-cosmos-spark.properties b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/main/resources/azure-cosmos-spark.properties
new file mode 100644
index 000000000000..ca812989b4f2
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/main/resources/azure-cosmos-spark.properties
@@ -0,0 +1,2 @@
+name=${project.artifactId}
+version=${project.version}
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.CosmosClientBuilderInterceptor b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.CosmosClientBuilderInterceptor
new file mode 100644
index 000000000000..0d43a5bfc657
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.CosmosClientBuilderInterceptor
@@ -0,0 +1 @@
+com.azure.cosmos.spark.TestCosmosClientBuilderInterceptor
\ No newline at end of file
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.CosmosClientInterceptor b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.CosmosClientInterceptor
new file mode 100644
index 000000000000..e2239720776d
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.CosmosClientInterceptor
@@ -0,0 +1 @@
+com.azure.cosmos.spark.TestFaultInjectionClientInterceptor
\ No newline at end of file
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.WriteOnRetryCommitInterceptor b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.WriteOnRetryCommitInterceptor
new file mode 100644
index 000000000000..c60cbf2f14e4
--- /dev/null
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-13/src/test/resources/META-INF/services/com.azure.cosmos.spark.WriteOnRetryCommitInterceptor
@@ -0,0 +1 @@
+com.azure.cosmos.spark.TestWriteOnRetryCommitInterceptor
\ No newline at end of file
diff --git a/sdk/cosmos/azure-cosmos-spark_3/dev/README.md b/sdk/cosmos/azure-cosmos-spark_3/dev/README.md
index d15bf4113612..30ba9fbfc0db 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/dev/README.md
+++ b/sdk/cosmos/azure-cosmos-spark_3/dev/README.md
@@ -46,6 +46,7 @@ mvn -e -DskipTests -Dgpg.skip -Dmaven.javadoc.skip=true -Dcodesnippet.skip=true
mvn -e -DskipTests -Dgpg.skip -Dmaven.javadoc.skip=true -Dcodesnippet.skip=true -Dspotbugs.skip=true -Dcheckstyle.skip=true -Drevapi.skip=true -pl ,azure-cosmos-spark_3-3_2-12 clean install
mvn -e -DskipTests -Dgpg.skip -Dmaven.javadoc.skip=true -Dcodesnippet.skip=true -Dspotbugs.skip=true -Dcheckstyle.skip=true -Drevapi.skip=true -pl ,azure-cosmos-spark_3-4_2-12 clean install
mvn -e -DskipTests -Dgpg.skip -Dmaven.javadoc.skip=true -Dcodesnippet.skip=true -Dspotbugs.skip=true -Dcheckstyle.skip=true -Drevapi.skip=true -pl ,azure-cosmos-spark_3-5_2-12 clean install
+mvn -e -DskipTests -Dgpg.skip -Dmaven.javadoc.skip=true -Dcodesnippet.skip=true -Dspotbugs.skip=true -Dcheckstyle.skip=true -Drevapi.skip=true -pl ,azure-cosmos-spark_3-5_2-13 clean install
```
Take these files:
diff --git a/sdk/cosmos/azure-cosmos-spark_3/pom.xml b/sdk/cosmos/azure-cosmos-spark_3/pom.xml
index e7b846207ee5..3dc770578e67 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3/pom.xml
@@ -38,7 +38,20 @@
${cosmos.spark.skip}
${cosmos.spark.skip}
- 3.1
+ 3.3
+
+ 2.12
+ 2.12.19
+ 3.3.0
+ 3.4.0
+ 3.5.0
+ 0.9.1
+ 0.8.0
+ 3.2.2
+ 3.2.3
+ 3.2.3
+ 5.0.0
+ 2.18.4
@@ -52,7 +65,7 @@
org.scala-lang
scala-library
- 2.12.19
+ ${scala.version}
provided
@@ -72,13 +85,13 @@
org.scala-lang.modules
- scala-java8-compat_2.12
- 0.8.0
+ scala-java8-compat_${scala.binary.version}
+ ${scala-java8-compat.version}
io.projectreactor
- reactor-scala-extensions_2.12
- 0.8.0
+ reactor-scala-extensions_${scala.binary.version}
+ ${reactor-scala-extensions.version}
io.projectreactor
@@ -170,26 +183,26 @@
org.scalatest
- scalatest_2.12
- 3.2.2
+ scalatest_${scala.binary.version}
+ ${scalatest.version}
test
org.scalatest
- scalatest-flatspec_2.12
- 3.2.3
+ scalatest-flatspec_${scala.binary.version}
+ ${scalatest-flatspec.version}
test
org.scalactic
- scalactic_2.12
- 3.2.3
+ scalactic_${scala.binary.version}
+ ${scalactic.version}
test
org.scalamock
- scalamock_2.12
- 5.0.0
+ scalamock_${scala.binary.version}
+ ${scalamock.version}
test
@@ -279,20 +292,25 @@
org.apache.commons:commons-lang3:[3.18.0]
org.slf4j:slf4j-api:[1.7.36]
- org.apache.spark:spark-sql_2.12:[3.3.0]
- org.apache.spark:spark-sql_2.12:[3.4.0]
- org.apache.spark:spark-sql_2.12:[3.5.0]
- org.scala-lang:scala-library:[2.12.19]
- org.scala-lang.modules:scala-java8-compat_2.12:[0.8.0]
- io.projectreactor:reactor-scala-extensions_2.12:[0.8.0]
- org.scalatest:scalatest_2.12:[3.2.2]
+ org.apache.spark:spark-sql_2.12:[${spark33.version}]
+ org.apache.spark:spark-sql_2.12:[${spark34.version}]
+ org.apache.spark:spark-sql_2.12:[${spark35.version}]
+ org.apache.spark:spark-sql_2.13:[${spark35.version}]
+ org.scala-lang:scala-library:[${scala.version}]
+ org.scala-lang.modules:scala-java8-compat_2.12:[${scala-java8-compat.version}]
+ org.scala-lang.modules:scala-java8-compat_2.13:[${scala-java8-compat.version}]
+ io.projectreactor:reactor-scala-extensions_2.12:[${reactor-scala-extensions.version}]
+ io.projectreactor:reactor-scala-extensions_2.13:[${reactor-scala-extensions.version}]
+ org.scalatest:scalatest_2.12:[${scalatest.version}]
+ org.scalatest:scalatest_2.13:[${scalatest.version}]
org.apache.maven.plugins:maven-antrun-plugin:[3.1.0]
net.alchim31.maven:scala-maven-plugin:[4.8.1]
org.scalastyle:scalastyle-maven-plugin:[1.0.0]
com.fasterxml.jackson.core:jackson-databind:[2.18.4]
com.fasterxml.jackson.datatype:jackson-datatype-jsr310:[2.18.4]
com.fasterxml.jackson.module:jackson-module-afterburner:[2.18.4]
- com.fasterxml.jackson.module:jackson-module-scala_2.12:[2.18.4]
+ com.fasterxml.jackson.module:jackson-module-scala_2.12:[${scala-jackson.version}]
+ com.fasterxml.jackson.module:jackson-module-scala_2.13:[${scala-jackson.version}]
io.micrometer:micrometer-registry-azure-monitor:[1.15.1]
io.micrometer:micrometer-core:[1.15.1]
com.microsoft.azure:applicationinsights-core:[2.6.4]
@@ -381,7 +399,7 @@
1.8
1.8
- 2.12.19
+ ${scala.version}
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosCatalogBase.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosCatalogBase.scala
index 27a69b012eef..3da6ea1cb0b9 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosCatalogBase.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosCatalogBase.scala
@@ -604,7 +604,7 @@ class CosmosCatalogBase
v.viewName.equals(viewName)) match {
case Some(existingView) =>
val updatedViewDefinitionsSnapshot: Array[ViewDefinition] =
- (ArrayBuffer(viewDefinitions: _*) - existingView).toArray
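+          // Scala 2.13 dropped the '-' operator on mutable buffers, so remove the entry with filterNot instead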
+ ArrayBuffer(viewDefinitions: _*).filterNot(_ == existingView).toArray
if (viewRepositorySnapshot.add(
lastBatchId + 1,
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientMetrics.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientMetrics.scala
index 99f906ff4a2e..05defc0884ed 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientMetrics.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosClientMetrics.scala
@@ -80,7 +80,7 @@ private[spark] object CosmosClientMetrics extends BasicLoggingTrait {
override protected def nullGaugeValue: java.lang.Double = Double.NaN
- override protected def close(): Unit = {
+ override def close(): Unit = {
super.close()
slf4JReporter match {
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosItemIdentityHelper.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosItemIdentityHelper.scala
index bc149f9623c8..c91b732d7b63 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosItemIdentityHelper.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosItemIdentityHelper.scala
@@ -6,6 +6,7 @@ package com.azure.cosmos.spark
import com.azure.cosmos.implementation.routing.PartitionKeyInternal
import com.azure.cosmos.implementation.{ImplementationBridgeHelpers, Utils}
import com.azure.cosmos.models.{CosmosItemIdentity, PartitionKey}
+import com.azure.cosmos.spark.diagnostics.BasicLoggingTrait
import java.util
@@ -13,7 +14,7 @@ import java.util
import scala.collection.JavaConverters._
// scalastyle:on underscore.import
-private[spark] object CosmosItemIdentityHelper {
+private[spark] object CosmosItemIdentityHelper extends BasicLoggingTrait {
// pattern will be recognized
// 1. id(idValue).pk(partitionKeyValue)
//
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosTableSchemaInferrer.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosTableSchemaInferrer.scala
index 41af8d70cef9..0b7d46dae134 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosTableSchemaInferrer.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/CosmosTableSchemaInferrer.scala
@@ -163,7 +163,7 @@ private object CosmosTableSchemaInferrer
.limit(cosmosInferenceConfig.inferSchemaSamplingSize)
.collect(Collectors.toList[ObjectNode]())
- schema = Some(inferSchema(feedResponseList.asScala,
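+    // on Scala 2.13, Seq aliases immutable.Seq, so the mutable Buffer returned by asScala needs an explicit .toSeq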
+ schema = Some(inferSchema(feedResponseList.asScala.toSeq,
cosmosInferenceConfig.inferSchemaQuery.isDefined || cosmosInferenceConfig.includeSystemProperties,
cosmosInferenceConfig.inferSchemaQuery.isDefined || cosmosInferenceConfig.includeTimestamp,
cosmosInferenceConfig.allowNullForInferredProperties))
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/PointWriter.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/PointWriter.scala
index 45d45e033e53..8f07bf5339d5 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/PointWriter.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/PointWriter.scala
@@ -145,7 +145,7 @@ private class PointWriter(container: CosmosAsyncContainer,
executeAsync(() => createWithRetry(partitionKeyValue, objectNode, createOperation))
.onComplete {
case Success(_) =>
- promise.success(Unit)
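+          // pass the unit value () explicitly rather than the Unit companion object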
+ promise.success(())
pendingPointWrites.remove(promise.future)
log.logItemWriteCompletion(createOperation)
case Failure(e) =>
@@ -167,7 +167,7 @@ private class PointWriter(container: CosmosAsyncContainer,
executeAsync(() => upsertWithRetry(partitionKeyValue, objectNode, upsertOperation))
.onComplete {
case Success(_) =>
- promise.success(Unit)
+ promise.success(())
pendingPointWrites.remove(promise.future)
log.logItemWriteCompletion(upsertOperation)
case Failure(e) =>
@@ -191,7 +191,7 @@ private class PointWriter(container: CosmosAsyncContainer,
executeAsync(() => deleteWithRetry(partitionKeyValue, objectNode, onlyIfNotModified, deleteOperation))
.onComplete {
case Success(_) =>
- promise.success(Unit)
+ promise.success(())
pendingPointWrites.remove(promise.future)
log.logItemWriteCompletion(deleteOperation)
case Failure(e) =>
@@ -214,7 +214,7 @@ private class PointWriter(container: CosmosAsyncContainer,
executeAsync(() => patchWithRetry(partitionKeyValue, objectNode, patchOperation, ignoreNotFound))
.onComplete {
case Success(_) =>
- promise.success(Unit)
+ promise.success(())
pendingPointWrites.remove(promise.future)
log.logItemWriteCompletion(patchOperation)
case Failure(e) =>
@@ -241,7 +241,7 @@ private class PointWriter(container: CosmosAsyncContainer,
executeAsync(() => replaceIfNotModifiedWithRetry(partitionKeyValue, objectNode, etag, replaceOperation))
.onComplete {
case Success(_) =>
- promise.success(Unit)
+ promise.success(())
pendingPointWrites.remove(promise.future)
log.logItemWriteCompletion(replaceOperation)
case Failure(e) =>
@@ -648,7 +648,7 @@ private class PointWriter(container: CosmosAsyncContainer,
override def call(): Unit = {
try {
work()
- future.complete(Unit)
+ future.complete(())
} catch {
case e: Exception =>
future.completeExceptionally(e)
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/TransientIOErrorsRetryingReadManyIterator.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/TransientIOErrorsRetryingReadManyIterator.scala
index 39053c668574..c51c5c1226e1 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/TransientIOErrorsRetryingReadManyIterator.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/TransientIOErrorsRetryingReadManyIterator.scala
@@ -59,7 +59,7 @@ private[spark] class TransientIOErrorsRetryingReadManyIterator[TSparkRow]
while (returnValue.isEmpty) {
if (readManyFilterBatchIterator.hasNext) {
// fetch items for the next readMany filter batch
- val readManyFilterBatch = readManyFilterBatchIterator.next()
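+        // materialize the batch as an immutable List before it is captured by the retry closure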
+ val readManyFilterBatch = readManyFilterBatchIterator.next().toList
returnValue =
TransientErrorsRetryPolicy.executeWithRetry(
() => hasNextInternalCore(readManyFilterBatch),
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/catalog/CosmosCatalogCosmosSDKClient.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/catalog/CosmosCatalogCosmosSDKClient.scala
index d4e7e07f73f7..0ee2162eabd0 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/catalog/CosmosCatalogCosmosSDKClient.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/catalog/CosmosCatalogCosmosSDKClient.scala
@@ -4,7 +4,8 @@
package com.azure.cosmos.spark.catalog
import com.azure.cosmos.CosmosAsyncClient
-import com.azure.cosmos.models.{CosmosContainerProperties, ExcludedPath, FeedRange, IncludedPath, IndexingMode, IndexingPolicy, ModelBridgeInternal, PartitionKeyDefinition, PartitionKeyDefinitionVersion, PartitionKind, SparkModelBridgeInternal, ThroughputProperties}
+import com.azure.cosmos.models.{CosmosContainerProperties => ModelsCosmosContainerProperties, ExcludedPath, FeedRange, IncludedPath, IndexingMode, IndexingPolicy, ModelBridgeInternal, PartitionKeyDefinition, PartitionKeyDefinitionVersion, PartitionKind, SparkModelBridgeInternal, ThroughputProperties}
+import com.azure.cosmos.spark.catalog.{CosmosContainerProperties => CatalogCosmosContainerProperties}
import com.azure.cosmos.spark.diagnostics.BasicLoggingTrait
import com.azure.cosmos.spark.{ContainerFeedRangesCache, CosmosConstants, Exceptions}
import org.apache.spark.sql.connector.catalog.{NamespaceChange, TableChange}
@@ -21,6 +22,8 @@ import java.util.Collections
import scala.collection.JavaConverters._
// scalastyle:on underscore.import
+
+
private[spark] case class CosmosCatalogCosmosSDKClient(cosmosAsyncClient: CosmosAsyncClient)
extends CosmosCatalogClient
with BasicLoggingTrait {
@@ -80,15 +83,15 @@ private[spark] case class CosmosCatalogCosmosSDKClient(cosmosAsyncClient: Cosmos
val partitionKeyDefinition = getPartitionKeyDefinition(containerProperties)
val indexingPolicy = getIndexingPolicy(containerProperties)
- val cosmosContainerProperties = new CosmosContainerProperties(containerName, partitionKeyDefinition)
+ val cosmosContainerProperties = new ModelsCosmosContainerProperties(containerName, partitionKeyDefinition)
cosmosContainerProperties.setIndexingPolicy(indexingPolicy)
- CosmosContainerProperties.getDefaultTtlInSeconds(containerProperties) match {
+ CatalogCosmosContainerProperties.getDefaultTtlInSeconds(containerProperties) match {
case Some(ttl) => cosmosContainerProperties.setDefaultTimeToLiveInSeconds(ttl)
case None =>
}
- CosmosContainerProperties.getAnalyticalStoreTtlInSeconds(containerProperties) match {
+ CatalogCosmosContainerProperties.getAnalyticalStoreTtlInSeconds(containerProperties) match {
case Some(ttl) => cosmosContainerProperties.setAnalyticalStoreTimeToLiveInSeconds(ttl)
case None =>
}
@@ -147,15 +150,15 @@ private[spark] case class CosmosCatalogCosmosSDKClient(cosmosAsyncClient: Cosmos
cosmosAsyncClient.getDatabase(databaseName).read().asScala.`then`()
private def getIndexingPolicy(containerProperties: Map[String, String]): IndexingPolicy = {
- val indexingPolicySpecification = CosmosContainerProperties.getIndexingPolicy(containerProperties)
+ val indexingPolicySpecification = CatalogCosmosContainerProperties.getIndexingPolicy(containerProperties)
//scalastyle:on multiple.string.literals
- if (CosmosContainerProperties.AllPropertiesIndexingPolicyName.equalsIgnoreCase(indexingPolicySpecification)) {
+ if (CatalogCosmosContainerProperties.AllPropertiesIndexingPolicyName.equalsIgnoreCase(indexingPolicySpecification)) {
new IndexingPolicy()
.setAutomatic(true)
.setIndexingMode(IndexingMode.CONSISTENT)
.setIncludedPaths(util.Arrays.asList(new IncludedPath("/*")))
.setExcludedPaths(util.Arrays.asList(new ExcludedPath(raw"""/"_etag"/?""")))
- } else if (CosmosContainerProperties.OnlySystemPropertiesIndexingPolicyName.equalsIgnoreCase(indexingPolicySpecification)) {
+ } else if (CatalogCosmosContainerProperties.OnlySystemPropertiesIndexingPolicyName.equalsIgnoreCase(indexingPolicySpecification)) {
new IndexingPolicy()
.setAutomatic(true)
.setIndexingMode(IndexingMode.CONSISTENT)
@@ -168,42 +171,38 @@ private[spark] case class CosmosCatalogCosmosSDKClient(cosmosAsyncClient: Cosmos
}
private def getPartitionKeyDefinition(containerProperties: Map[String, String]): PartitionKeyDefinition = {
- val partitionKeyPath = CosmosContainerProperties.getPartitionKeyPath(containerProperties)
+ val partitionKeyPath = CatalogCosmosContainerProperties.getPartitionKeyPath(containerProperties)
val partitionKeyDef = new PartitionKeyDefinition
val paths = new util.ArrayList[String]
val pathList = partitionKeyPath.split(",").toList
if (pathList.size >= 2) {
- partitionKeyDef.setKind(CosmosContainerProperties.getPartitionKeyKind(containerProperties) match {
- case Some(pkKind) => {
- if (pkKind == PartitionKind.HASH.toString) {
- throw new IllegalArgumentException("PartitionKind HASH is not supported for multi-hash partition key")
- }
- PartitionKind.MULTI_HASH
- }
+ partitionKeyDef.setKind(CatalogCosmosContainerProperties.getPartitionKeyKind(containerProperties) match {
+ case Some(pkKind) =>
+ if (pkKind == PartitionKind.HASH.toString) {
+ throw new IllegalArgumentException("PartitionKind HASH is not supported for multi-hash partition key")
+ }
+ PartitionKind.MULTI_HASH
case None => PartitionKind.MULTI_HASH
})
- partitionKeyDef.setVersion(CosmosContainerProperties.getPartitionKeyVersion(containerProperties) match {
+ partitionKeyDef.setVersion(CatalogCosmosContainerProperties.getPartitionKeyVersion(containerProperties) match {
case Some(pkVersion) =>
- {
- if (pkVersion == PartitionKeyDefinitionVersion.V1.toString) {
- throw new IllegalArgumentException("PartitionKeyVersion V1 is not supported for multi-hash partition key")
- }
- PartitionKeyDefinitionVersion.V2
+ if (pkVersion == PartitionKeyDefinitionVersion.V1.toString) {
+ throw new IllegalArgumentException("PartitionKeyVersion V1 is not supported for multi-hash partition key")
}
+ PartitionKeyDefinitionVersion.V2
case None => PartitionKeyDefinitionVersion.V2
})
pathList.foreach(path => paths.add(path.trim))
} else {
- partitionKeyDef.setKind(CosmosContainerProperties.getPartitionKeyKind(containerProperties) match {
- case Some(pkKind) => {
- if (pkKind == PartitionKind.MULTI_HASH.toString) {
- throw new IllegalArgumentException("PartitionKind MULTI_HASH is not supported for single-hash partition key")
- }
- PartitionKind.HASH
- }
+ partitionKeyDef.setKind(CatalogCosmosContainerProperties.getPartitionKeyKind(containerProperties) match {
+ case Some(pkKind) =>
+ if (pkKind == PartitionKind.MULTI_HASH.toString) {
+ throw new IllegalArgumentException("PartitionKind MULTI_HASH is not supported for single-hash partition key")
+ }
+ PartitionKind.HASH
case None => PartitionKind.HASH
})
- CosmosContainerProperties.getPartitionKeyVersion(containerProperties) match {
+ CatalogCosmosContainerProperties.getPartitionKeyVersion(containerProperties) match {
case Some(pkVersion) => partitionKeyDef.setVersion(PartitionKeyDefinitionVersion.valueOf(pkVersion))
case None =>
}
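
> Reviewer note: the branch above encodes the rule that a comma-separated partition key path list implies a multi-hash definition. A minimal standalone sketch of the same constraint using the public `com.azure.cosmos.models` types (the helper name is hypothetical, container wiring omitted):

```scala
// Minimal sketch: multi-path partition keys require PartitionKind.MULTI_HASH and
// PartitionKeyDefinitionVersion.V2; a single path defaults to HASH.
import com.azure.cosmos.models.{PartitionKeyDefinition, PartitionKeyDefinitionVersion, PartitionKind}
import java.util

def partitionKeyDefinitionFor(pathSpec: String): PartitionKeyDefinition = {
  val paths = pathSpec.split(",").map(_.trim).toList
  val pkDef = new PartitionKeyDefinition
  if (paths.size >= 2) {
    pkDef.setKind(PartitionKind.MULTI_HASH)
    pkDef.setVersion(PartitionKeyDefinitionVersion.V2)
  } else {
    pkDef.setKind(PartitionKind.HASH)
  }
  val javaPaths = new util.ArrayList[String]
  paths.foreach(javaPaths.add)
  pkDef.setPaths(javaPaths)
}

partitionKeyDefinitionFor("/tenantId,/userId") // MULTI_HASH, version V2
```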
@@ -278,10 +277,10 @@ private[spark] case class CosmosCatalogCosmosSDKClient(cosmosAsyncClient: Cosmos
// scalastyle:off method.length
private def generateTblProperties
(
- metadata: (CosmosContainerProperties, List[FeedRange], Option[(ThroughputProperties, Boolean)])
+ metadata: (ModelsCosmosContainerProperties, List[FeedRange], Option[(ThroughputProperties, Boolean)])
): util.HashMap[String, String] = {
- val containerProperties: CosmosContainerProperties = metadata._1
+ val containerProperties: ModelsCosmosContainerProperties = metadata._1
val feedRanges: List[FeedRange] = metadata._2
val throughputPropertiesOption: Option[(ThroughputProperties, Boolean)] = metadata._3
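
> Reviewer note: the `CatalogCosmosContainerProperties`/`ModelsCosmosContainerProperties` names above resolve a clash between the connector's catalog helper and the SDK model class that share the name `CosmosContainerProperties`, presumably via Scala import renaming. A sketch of the pattern (the second package path is an assumption for illustration):

```scala
// Sketch: import renaming lets two classes named CosmosContainerProperties coexist in one file.
import com.azure.cosmos.models.{CosmosContainerProperties => ModelsCosmosContainerProperties}
import com.azure.cosmos.spark.{CosmosContainerProperties => CatalogCosmosContainerProperties}
```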
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/udf/GetCosmosItemIdentityValue.scala b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/udf/GetCosmosItemIdentityValue.scala
index f8c2a7b7dac9..45259f6b6a66 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/udf/GetCosmosItemIdentityValue.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/main/scala/com/azure/cosmos/spark/udf/GetCosmosItemIdentityValue.scala
@@ -20,9 +20,9 @@ class GetCosmosItemIdentityValue extends UDF2[String, Object, String] {
requireNotNull(partitionKeyValue, "partitionKeyValue")
partitionKeyValue match {
- // for subpartitions case
- case wrappedArray: mutable.WrappedArray[Any] =>
- CosmosItemIdentityHelper.getCosmosItemIdentityValueString(id, wrappedArray.map(_.asInstanceOf[Object]).toList)
+ // for subpartitions case - Seq covers both WrappedArray (Scala 2.12) and ArraySeq (Scala 2.13)
+ case seq: Seq[Any] =>
+ CosmosItemIdentityHelper.getCosmosItemIdentityValueString(id, seq.map(_.asInstanceOf[Object]).toList)
case _ => CosmosItemIdentityHelper.getCosmosItemIdentityValueString(id, List(partitionKeyValue))
}
}
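
> Reviewer note: matching on `Seq` works because Spark hands the UDF a `mutable.WrappedArray` on Scala 2.12 but an `immutable.ArraySeq` on 2.13, and both are `Seq` subtypes; element types are erased at runtime, hence the unconstrained element type. A cross-version sketch:

```scala
// Sketch: one Seq pattern covers both Scala versions' array wrappers.
def describePartitionKey(pk: Any): String = pk match {
  case seq: Seq[_] => s"composite key: ${seq.mkString("[", ", ", "]")}"
  case single      => s"single key: $single"
}

describePartitionKey("user-1")                  // single key
describePartitionKey(Vector("tenant", "user"))  // composite; WrappedArray/ArraySeq land here too
```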
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosConfigSpec.scala b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosConfigSpec.scala
index c73bdb027fc4..0144b468582b 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosConfigSpec.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosConfigSpec.scala
@@ -252,7 +252,7 @@ class CosmosConfigSpec extends UnitSpec with BasicLoggingTrait {
case otherError: Throwable => throw otherError
}
- val userCfgMissingArmEndpoint = userConfig.toMap.filterKeys(_ != "spark.cosmos.account.azureEnvironment.mANagement")
+ val userCfgMissingArmEndpoint = userConfig.toMap.filter(_._1 != "spark.cosmos.account.azureEnvironment.mANagement")
try {
CosmosAccountConfig.parseCosmosAccountConfig(userCfgMissingArmEndpoint)
throw new IllegalStateException("Should never reach here when ARM endpoint config is missing")
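
> Reviewer note: `Map#filterKeys` is deprecated on Scala 2.13 and returns a lazy `MapView` rather than a `Map`, so the test switches to the strict `filter` over key/value pairs, which behaves identically on both versions. A reduced sketch (the endpoint value is illustrative):

```scala
// Sketch: strict, cross-version removal of one key from an immutable Map.
val userConfig = Map(
  "spark.cosmos.account.azureEnvironment.mANagement" -> "https://management.azure.com/"
)
val withoutArmEndpoint = userConfig.filter(_._1 != "spark.cosmos.account.azureEnvironment.mANagement")
assert(!withoutArmEndpoint.contains("spark.cosmos.account.azureEnvironment.mANagement"))
```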
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosPartitionPlannerITest.scala b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosPartitionPlannerITest.scala
index 78262b17455f..8826c6d46fe4 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosPartitionPlannerITest.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/CosmosPartitionPlannerITest.scala
@@ -20,7 +20,7 @@ import java.util
import java.util.UUID
import java.util.concurrent.atomic.{AtomicInteger, AtomicLong}
import scala.collection.mutable.ArrayBuffer
-import scala.jdk.CollectionConverters.asScalaBufferConverter
+import scala.jdk.CollectionConverters._
class CosmosPartitionPlannerITest
extends UnitSpec
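
> Reviewer note: the wildcard import is what keeps this compiling under both Scala versions: the 2.12 flavor of `scala.jdk.CollectionConverters` (presumably supplied by scala-collection-compat) exposes the old converter names such as `asScalaBufferConverter`, while the native 2.13 object exposes differently named extension classes; `_` picks up whichever set is present. Call sites are unchanged:

```scala
// Sketch: .asScala resolves on both 2.12 and 2.13 once the wildcard import is in scope.
import scala.jdk.CollectionConverters._

val javaList: java.util.List[String] = java.util.Arrays.asList("a", "b")
val asScalaList = javaList.asScala.toList // List("a", "b")
```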
@@ -386,7 +386,7 @@ class CosmosPartitionPlannerITest
val alwaysThrow = false
partitions.foreach {
- case _: CosmosInputPartition => Unit
+ case _: CosmosInputPartition => ()
case _ => assert(alwaysThrow, "Unexpected partition type")
}
partitions should have size expectedPartitionCount
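
> Reviewer note: in the old branch, `Unit` referred to the `Unit` companion object (widening the match result to `Any`), not the unit value; `()` is the actual no-op result and is what Scala 2.13 expects. For example:

```scala
// Sketch: () is the unit value; Unit in expression position is the companion object.
List(1, 2, 3).foreach {
  case 2 => ()          // explicit no-op branch
  case n => println(n)  // side-effecting branch
}
```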
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/FilterAnalyzerSpec.scala b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/FilterAnalyzerSpec.scala
index 659dd0782399..949384815fb1 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/FilterAnalyzerSpec.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/FilterAnalyzerSpec.scala
@@ -53,7 +53,7 @@ class FilterAnalyzerSpec extends UnitSpec {
EqualTo("physicist", "Schrodinger"), In("isCatAlive", Array(true, false)))
val analyzedQuery = filterProcessorWithoutCustomQuery.analyze(filters)
analyzedQuery.filtersNotSupportedByCosmos shouldBe empty
- analyzedQuery.filtersToBePushedDownToCosmos.toIterable should contain theSameElementsAs filters.toList
+ analyzedQuery.filtersToBePushedDownToCosmos.toArray should contain theSameElementsAs filters.toList
val query = analyzedQuery.cosmosParametrizedQuery
query.queryText shouldEqual "SELECT * FROM r WHERE r['physicist']=@param0 AND r['isCatAlive'] IN (@param1,@param2)"
@@ -223,7 +223,7 @@ class FilterAnalyzerSpec extends UnitSpec {
EqualTo("physicist", "Schrodinger"), In("isCatAlive", Array(true, false)))
val analyzedQuery = filterProcessorWithCustomQuery.analyze(filters)
analyzedQuery.filtersToBePushedDownToCosmos shouldBe empty
- analyzedQuery.filtersNotSupportedByCosmos.toIterable should contain theSameElementsAs filters.toList
+ analyzedQuery.filtersNotSupportedByCosmos.toArray should contain theSameElementsAs filters.toList
val query = analyzedQuery.cosmosParametrizedQuery
query.queryText shouldEqual queryText
@@ -238,7 +238,7 @@ class FilterAnalyzerSpec extends UnitSpec {
val analyzedFilters = filterProcessorWithoutCustomQuery.analyze(filters)
analyzedFilters.filtersToBePushedDownToCosmos shouldBe empty
- analyzedFilters.filtersNotSupportedByCosmos.toIterable should contain theSameElementsAs filters.toList
+ analyzedFilters.filtersNotSupportedByCosmos.toArray should contain theSameElementsAs filters.toList
analyzedFilters.cosmosParametrizedQuery.queryText shouldEqual QueryFilterAnalyzer.rootParameterizedQuery.queryText
analyzedFilters.readManyFiltersOpt.isDefined shouldBe false
}
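
> Reviewer note: `toIterable` is deprecated on Scala 2.13, so these assertions convert with `toArray` instead; ScalaTest's `theSameElementsAs` compares order-insensitively either way. A reduced sketch assuming ScalaTest 3.x Matchers (the filter strings are stand-ins):

```scala
// Sketch: order-insensitive element comparison without the deprecated toIterable.
import org.scalatest.matchers.should.Matchers._

val pushedDown = Array("EqualTo(physicist)", "In(isCatAlive)")
pushedDown.toArray should contain theSameElementsAs List("In(isCatAlive)", "EqualTo(physicist)")
```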
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EBulkWriteITest.scala b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EBulkWriteITest.scala
index 0ad43de74e30..a1be10199ed9 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EBulkWriteITest.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EBulkWriteITest.scala
@@ -114,7 +114,7 @@ class SparkE2EBulkWriteITest
toBeIngested += s"record_$i"
}
- val df = toBeIngested.toDF("id")
+ val df = toBeIngested.toSeq.toDF("id")
var bytesWrittenSnapshot = 0L
var recordsWrittenSnapshot = 0L
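
> Reviewer note: the extra `.toSeq` is needed because on Scala 2.13 `scala.Seq` aliases `immutable.Seq`, so a mutable buffer no longer satisfies the `Seq` bound of Spark's `toDF` implicit; `.toSeq` copies to an immutable sequence on 2.13 and is effectively free on 2.12. A standalone sketch:

```scala
// Sketch: mutable buffer -> immutable Seq -> DataFrame, compiling on 2.12 and 2.13.
import org.apache.spark.sql.SparkSession
import scala.collection.mutable.ArrayBuffer

val spark = SparkSession.builder().master("local[*]").appName("toSeq-toDF").getOrCreate()
import spark.implicits._

val toBeIngested = ArrayBuffer.empty[String]
(1 to 3).foreach(i => toBeIngested += s"record_$i")
val df = toBeIngested.toSeq.toDF("id")
df.show()
```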
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
index 6c82a92f59be..85d46a8e4032 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala
@@ -20,7 +20,7 @@ import java.time.Duration
import java.util.UUID
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import scala.jdk.CollectionConverters.asScalaBufferConverter
+import scala.jdk.CollectionConverters._
class SparkE2EChangeFeedITest
extends IntegrationSpec
@@ -248,7 +248,6 @@ class SparkE2EChangeFeedITest
val cosmosMasterKey = TestConfigurations.MASTER_KEY
CosmosClientMetrics.meterRegistry.isDefined shouldEqual true
- val meterRegistry = CosmosClientMetrics.meterRegistry.get
val container = cosmosClient.getDatabase(cosmosDatabase).getContainer(cosmosContainer)
val sinkContainerName = cosmosClient
@@ -535,8 +534,10 @@ class SparkE2EChangeFeedITest
val collectedFrame = groupedFrame.collect()
collectedFrame.foreach(row => {
- val wrappedArray = row.get(1).asInstanceOf[mutable.WrappedArray[String]]
- val array = wrappedArray.array
+ val array = row.get(1) match {
+ case seq: Seq[_] => seq.map(_.asInstanceOf[String]).toArray
+ case other => throw new IllegalArgumentException(s"Unexpected array column type: $other")
+ }
row.get(0) match {
case "create" =>
validateArraysUnordered(createdObjectIds, array)
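
> Reviewer note: same cross-version concern as in `GetCosmosItemIdentityValue` — `collect()` returns array columns as `mutable.WrappedArray` on 2.12 and `immutable.ArraySeq` on 2.13, so the test now matches on `Seq`. When the column index and element type are fixed, `Row#getSeq` is an alternative that avoids the pattern match entirely (a sketch, not the test's actual approach):

```scala
// Sketch: typed access to an array column; getSeq returns Seq[String] on both versions.
import org.apache.spark.sql.Row

def objectIdsOf(row: Row): Array[String] =
  row.getSeq[String](1).toArray
```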
@@ -859,7 +860,7 @@ class SparkE2EChangeFeedITest
hdfs.copyToLocalFile(true, new Path(startOffsetFileLocation), new Path(startOffsetFileBackupLocation))
hdfs.exists(new Path(startOffsetFileLocation)) shouldEqual false
- var remainingFromLastBatchOfTen = 10;
+ var remainingFromLastBatchOfTen = 10
while(remainingFromLastBatchOfTen > 0) {
hdfs.copyToLocalFile(true, new Path(startOffsetFileBackupLocation), new Path(startOffsetFileLocation))
hdfs.delete(new Path(latestOffsetFileLocation), true)
diff --git a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/TestUtils.scala b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/TestUtils.scala
index 8eadf4261287..e5040944932a 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/TestUtils.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3/src/test/scala/com/azure/cosmos/spark/TestUtils.scala
@@ -25,10 +25,7 @@ import java.util.UUID
import java.util.concurrent.atomic.AtomicInteger
import javax.annotation.concurrent.NotThreadSafe
import scala.collection.mutable.ListBuffer
-import scala.jdk.CollectionConverters.iterableAsScalaIterableConverter
-// scalastyle:off underscore.import
-import scala.collection.JavaConverters._
-// scalastyle:on underscore.import
+import scala.jdk.CollectionConverters._
// extending class will have a pre-created spark session
@NotThreadSafe // marking this as not thread safe because we have to stop Spark Context in some unit tests
diff --git a/sdk/cosmos/azure-cosmos-spark_3/test-databricks/databricks-jar-install.sh b/sdk/cosmos/azure-cosmos-spark_3/test-databricks/databricks-jar-install.sh
index d361a49014ef..d0a16daa4f67 100644
--- a/sdk/cosmos/azure-cosmos-spark_3/test-databricks/databricks-jar-install.sh
+++ b/sdk/cosmos/azure-cosmos-spark_3/test-databricks/databricks-jar-install.sh
@@ -3,7 +3,9 @@
CLUSTER_NAME=$1
AVOID_DBFS=$2
JARPATH=$3
-STORAGE_ACCOUNT_KEY=$4
+STORAGE_ACCOUNT_NAME=$4
+STORAGE_ACCOUNT_KEY=$5
+JAR_NAME=$6
[[ -z "$CLUSTER_NAME" ]] && exit 1
[[ -z "$JARPATH" ]] && exit 1
@@ -45,13 +47,13 @@ echo "Avoid DBFS: $AVOID_DBFS"
# DATABRICKS_RUNTIME_VERSION is not populated in the environment and version comparison is messy in bash
# Using cluster name for the cluster that was created with 16.4
if [[ "${AVOID_DBFS,,}" == "true" ]]; then
- account=oltpsparkcijarstore
+ account=$STORAGE_ACCOUNT_NAME

- echo "Uploading jar '$JARPATH/$JARFILE' to Azure Storage account oltpsparkcijarstore (ephemeral tenant) container jarstore BLOB jars/azure-cosmos-spark_3-5_2-12-latest-ci-candidate.jar"
- az storage blob upload --account-name oltpsparkcijarstore --account-key $STORAGE_ACCOUNT_KEY --container-name jarstore --name jars/azure-cosmos-spark_3-5_2-12-latest-ci-candidate.jar --file $JARPATH/$JARFILE --type block --overwrite true --only-show-errors
+ echo "Uploading jar '$JARPATH/$JARFILE' to Azure Storage account $STORAGE_ACCOUNT_NAME (ephemeral tenant) container jarstore BLOB jars/$JAR_NAME"
+ az storage blob upload --account-name $STORAGE_ACCOUNT_NAME --account-key $STORAGE_ACCOUNT_KEY --container-name jarstore --name jars/$JAR_NAME --file $JARPATH/$JARFILE --type block --overwrite true --only-show-errors
if [ $? -eq 0 ]; then
- echo "Successfully uploaded JAR to oltpsparkcijarstore (ephemeral tenant)."
+ echo "Successfully uploaded JAR to $STORAGE_ACCOUNT_NAME (ephemeral tenant)."
echo "Rebooting cluster to install new library via init script"
else
echo "Failed to upload JAR to Workspace Files."
diff --git a/sdk/cosmos/ci.yml b/sdk/cosmos/ci.yml
index 2f094d0ec3b9..2e1c845dae37 100644
--- a/sdk/cosmos/ci.yml
+++ b/sdk/cosmos/ci.yml
@@ -18,6 +18,7 @@ trigger:
- sdk/cosmos/azure-cosmos-spark_3-4_2-12/
- sdk/cosmos/azure-cosmos-spark_3-5/
- sdk/cosmos/azure-cosmos-spark_3-5_2-12/
+ - sdk/cosmos/azure-cosmos-spark_3-5_2-13/
- sdk/cosmos/fabric-cosmos-spark-auth_3/
- sdk/cosmos/azure-cosmos-test/
- sdk/cosmos/azure-cosmos-tests/
@@ -33,6 +34,7 @@ trigger:
- sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml
- sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml
- sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml
+ - sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml
- sdk/cosmos/azure-cosmos-spark_3-5/pom.xml
- sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml
- sdk/cosmos/azure-cosmos-kafka-connect/pom.xml
@@ -59,6 +61,7 @@ pr:
- sdk/cosmos/azure-cosmos-spark_3-4_2-12/
- sdk/cosmos/azure-cosmos-spark_3-5/
- sdk/cosmos/azure-cosmos-spark_3-5_2-12/
+ - sdk/cosmos/azure-cosmos-spark_3-5_2-13/
- sdk/cosmos/fabric-cosmos-spark-auth_3/
- sdk/cosmos/faq/
- sdk/cosmos/azure-cosmos-kafka-connect/
@@ -72,6 +75,7 @@ pr:
- sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml
- sdk/cosmos/azure-cosmos-spark_3-5/pom.xml
- sdk/cosmos/azure-cosmos-spark_3-5_2-12/pom.xml
+ - sdk/cosmos/azure-cosmos-spark_3-5_2-13/pom.xml
- sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml
- sdk/cosmos/azure-cosmos-test/pom.xml
- sdk/cosmos/azure-cosmos-tests/pom.xml
@@ -93,10 +97,14 @@ parameters:
displayName: 'azure-cosmos-spark_3-4_2-12'
type: boolean
default: true
- - name: release_azurecosmosspark35
+ - name: release_azurecosmosspark35_scala212
displayName: 'azure-cosmos-spark_3-5_2-12'
type: boolean
default: true
+ - name: release_azurecosmosspark35_scala213
+ displayName: 'azure-cosmos-spark_3-5_2-13'
+ type: boolean
+ default: true
- name: release_fabriccosmossparkauth3
displayName: 'fabric-cosmos-spark-auth_3'
type: boolean
@@ -140,11 +148,18 @@ extends:
releaseInBatch: ${{ parameters.release_azurecosmosspark34 }}
- name: azure-cosmos-spark_3-5_2-12
groupId: com.azure.cosmos.spark
- safeName: azurecosmosspark35
+ safeName: azurecosmosspark35scala212
+ uberJar: true
+ skipPublishDocGithubIo: true
+ skipPublishDocMs: true
+ releaseInBatch: ${{ parameters.release_azurecosmosspark35_scala212 }}
+ - name: azure-cosmos-spark_3-5_2-13
+ groupId: com.azure.cosmos.spark
+ safeName: azurecosmosspark35scala213
uberJar: true
skipPublishDocGithubIo: true
skipPublishDocMs: true
- releaseInBatch: ${{ parameters.release_azurecosmosspark35 }}
+ releaseInBatch: ${{ parameters.release_azurecosmosspark35_scala213 }}
- name: fabric-cosmos-spark-auth_3
groupId: com.azure.cosmos.spark
safeName: fabriccosmossparkauth3
diff --git a/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml b/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml
index 94d7f17c86ad..45524afbe974 100644
--- a/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml
+++ b/sdk/cosmos/fabric-cosmos-spark-auth_3/pom.xml
@@ -176,7 +176,7 @@
org.slf4j:slf4j-api:[1.7.36]
org.scala-lang:scala-library:[2.12.19]
- org.scala-lang.modules:scala-java8-compat_2.12:[0.8.0]
+ org.scala-lang.modules:scala-java8-compat_2.12:[0.9.1]
org.scalatest:scalatest_2.12:[3.2.2]
org.apache.maven.plugins:maven-antrun-plugin:[3.1.0]
org.scalastyle:scalastyle-maven-plugin:[1.0.0]
diff --git a/sdk/cosmos/pom.xml b/sdk/cosmos/pom.xml
index c486b2fe9849..81b2db7ca53e 100644
--- a/sdk/cosmos/pom.xml
+++ b/sdk/cosmos/pom.xml
@@ -18,6 +18,7 @@
azure-cosmos-spark_3-4_2-12
azure-cosmos-spark_3-5
azure-cosmos-spark_3-5_2-12
+ azure-cosmos-spark_3-5_2-13
azure-cosmos-test
azure-cosmos-tests
azure-cosmos-kafka-connect
diff --git a/sdk/cosmos/spark.databricks.yml b/sdk/cosmos/spark.databricks.yml
index 50b10d49190f..e7044941e9c9 100644
--- a/sdk/cosmos/spark.databricks.yml
+++ b/sdk/cosmos/spark.databricks.yml
@@ -34,6 +34,10 @@ parameters:
type: string
- name: JarReadOnlySasUri
type: string
+ - name: JarStorageAccountName
+ type: string
+ - name: JarName
+ type: string
stages:
- stage:
displayName: 'Spark Databricks integration ${{ parameters.ClusterName }} - ${{ parameters.SparkVersion }}'
@@ -91,7 +95,7 @@ stages:
displayName: Importing Jars
inputs:
filePath: $(build.sourcesdirectory)/sdk/cosmos/azure-cosmos-spark_3/test-databricks/databricks-jar-install.sh
- arguments: '${{ parameters.ClusterName }} ${{ parameters.AvoidDBFS }} $(build.sourcesdirectory)/sdk/cosmos/${{ parameters.SparkVersion }}/target ${{ parameters.JarStorageAccountKey }}'
+ arguments: '${{ parameters.ClusterName }} ${{ parameters.AvoidDBFS }} $(build.sourcesdirectory)/sdk/cosmos/${{ parameters.SparkVersion }}/target ${{ parameters.JarStorageAccountName }} ${{ parameters.JarStorageAccountKey }} ${{ parameters.JarName }}'
- task: Bash@3
displayName: Importing and executing notebooks
inputs:
@@ -135,6 +139,6 @@ stages:
fi
env:
JAR_URL: '${{ parameters.JarReadOnlySasUri }}'
- JAR_NAME: 'azure-cosmos-spark_3-5_2-12-latest-ci-candidate.jar'
+ JAR_NAME: '${{ parameters.SparkVersion }}-latest-ci-candidate.jar'
JAR_CHECK_SUM: $(JarCheckSum)
AVOID_DBFS: ${{ parameters.AvoidDBFS }}
diff --git a/sdk/cosmos/spark.yml b/sdk/cosmos/spark.yml
index 465a66078343..0dcfd657d0ae 100644
--- a/sdk/cosmos/spark.yml
+++ b/sdk/cosmos/spark.yml
@@ -26,6 +26,8 @@ stages:
ClusterName: 'oltp-ci-spark33-2workers-ds3v2'
JarStorageAccountKey: $(spark-databricks-cosmos-spn-clientIdCert)
JarReadOnlySasUri: $(spark-databricks-cosmos-spn-clientCertBase64)
+ JarStorageAccountName: 'oltpsparkcijarstore1225'
+ JarName: 'azure-cosmos-spark_3-5_2-12-latest-ci-candidate.jar'
- template: /sdk/cosmos/spark.databricks.yml
parameters:
CosmosEndpointMsi: $(spark-databricks-cosmos-endpoint-msi)
@@ -44,6 +46,8 @@ stages:
ClusterName: 'oltp-ci-spark34-2workers-ds3v2'
JarStorageAccountKey: $(spark-databricks-cosmos-spn-clientIdCert)
JarReadOnlySasUri: $(spark-databricks-cosmos-spn-clientCertBase64)
+ JarStorageAccountName: 'oltpsparkcijarstore1225'
+ JarName: 'azure-cosmos-spark_3-5_2-12-latest-ci-candidate.jar'
- template: /sdk/cosmos/spark.databricks.yml
parameters:
CosmosEndpointMsi: $(spark-databricks-cosmos-endpoint-msi)
@@ -63,6 +67,8 @@ stages:
AvoidDBFS: false
JarStorageAccountKey: $(spark-databricks-cosmos-spn-clientIdCert)
JarReadOnlySasUri: $(spark-databricks-cosmos-spn-clientCertBase64)
+ JarStorageAccountName: 'oltpsparkcijarstore1225'
+ JarName: 'azure-cosmos-spark_3-5_2-12-latest-ci-candidate.jar'
- template: /sdk/cosmos/spark.databricks.yml
parameters:
CosmosEndpointMsi: $(spark-databricks-cosmos-endpoint-msi)
@@ -82,4 +88,26 @@ stages:
AvoidDBFS: true
JarStorageAccountKey: $(spark-databricks-cosmos-spn-clientIdCert)
JarReadOnlySasUri: $(spark-databricks-cosmos-spn-clientCertBase64)
-
+ JarStorageAccountName: 'oltpsparkcijarstore1225'
+ JarName: 'azure-cosmos-spark_3-5_2-12-latest-ci-candidate.jar'
+ - template: /sdk/cosmos/spark.databricks.yml
+ parameters:
+ CosmosEndpointMsi: $(spark-databricks-cosmos-endpoint-msi)
+ CosmosEndpoint: $(spark-databricks-cosmos-endpoint)
+ CosmosKey: $(spark-databricks-cosmos-key)
+ DatabricksEndpoint: $(spark-databricks-endpoint-with-msi)
+ SubscriptionId: '8fba6d4f-7c37-4d13-9063-fd58ad2b86e2'
+ TenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47'
+ ResourceGroupName: 'oltp-spark-ci'
+ ClientId: $(spark-databricks-cosmos-spn-clientId)
+ ClientSecret: $(spark-databricks-cosmos-spn-clientSecret)
+ CosmosContainerName: 'sampleContainer7'
+ CosmosDatabaseName: 'sampleDB7'
+ DatabricksToken: $(spark-databricks-token-with-msi)
+ SparkVersion: 'azure-cosmos-spark_3-5_2-13'
+ ClusterName: 'oltp-ci-spark35-2workers-ds3v2-16.4-scala_2.13'
+ AvoidDBFS: true
+ JarStorageAccountKey: $(spark-databricks-cosmos-spn-clientIdCert)
+ JarReadOnlySasUri: $(spark-databricks-token)
+ JarStorageAccountName: 'oltpsparkcijarstore1225'
+ JarName: 'azure-cosmos-spark_3-5_2-13-latest-ci-candidate.jar'