85 changes: 85 additions & 0 deletions Tests/SparkConnectTests/ConstraintTests.swift
@@ -0,0 +1,85 @@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//

import Foundation
import SparkConnect
import Testing

/// A test suite for the new syntaxes from SPARK-51207 (SPIP: Constraints in DSv2).
/// For now, only syntax tests are here because Apache Spark 4.1 and the corresponding Apache Iceberg are not released yet.
Member

Can the existing parser parse these syntaxes?

Member Author
@dongjoon-hyun dongjoon-hyun Jul 17, 2025

Thank you for the review. Only 4.1.0-preview1 supports this.

 $ swift test --filter ConstraintTests
...
􀟈  Suite ConstraintTests started.
􀟈  Test primary_key() started.
􁁛  Test primary_key() passed after 0.118 seconds.
􀟈  Test foreign_key() started.
􁁛  Test foreign_key() passed after 0.059 seconds.
􀟈  Test unique() started.
􁁛  Test unique() passed after 0.088 seconds.
􀟈  Test check() started.
􁁛  Test check() passed after 0.087 seconds.
􁁛  Suite ConstraintTests passed after 0.354 seconds.
􁁛  Test run with 4 tests passed after 0.354 seconds.
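For readers following along, here is a minimal sketch, using only the session APIs already exercised in this suite, of how the version gate below works; the SPARK_REMOTE hint in the comment is the usual Spark Connect convention rather than something this PR introduces.

import SparkConnect

// Hypothetical helper, not part of this PR: returns true only when the connected
// server reports an Apache Spark 4.1 version string (for example, "4.1.0-preview1"),
// the first line of releases that accepts the constraint syntax tested below.
// The target server is typically selected via the SPARK_REMOTE environment variable
// (for example, sc://localhost:15002) before running `swift test`.
func serverSupportsConstraintSyntax(_ spark: SparkSession) async -> Bool {
  let version = await spark.version
  return version.starts(with: "4.1")
}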

Member
@viirya viirya Jul 17, 2025

Yeah, because you said only the syntax test is here since Apache Spark 4.1 is not released yet, it makes me think that versions prior to Apache Spark 4.1 support only the syntax and that Apache Spark 4.1 begins to support the feature fully. 🙂

@Suite(.serialized)
struct ConstraintTests {

  @Test
  func primary_key() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName)({
        try await spark.sql("CREATE TABLE \(tableName)(a INT, PRIMARY KEY(a)) USING ORC").count()
        try await spark.sql("INSERT INTO \(tableName) VALUES (1), (2)").count()
      })
    }
    await spark.stop()
  }

  @Test
  func foreign_key() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      let tableName1 = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      let tableName2 = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName1, tableName2)({
        try await spark.sql("CREATE TABLE \(tableName1)(id INT) USING ORC").count()
        // The foreign key column in the second table references the `id` column of the first table.
        try await spark.sql(
          "CREATE TABLE \(tableName2)(fk INT, FOREIGN KEY(fk) REFERENCES \(tableName1)(id)) USING ORC"
        ).count()
      })
    }
    await spark.stop()
  }

  @Test
  func unique() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName)({
        try await spark.sql("CREATE TABLE \(tableName)(a INT UNIQUE) USING ORC").count()
        try await spark.sql("INSERT INTO \(tableName) VALUES (1), (2)").count()
      })
    }
    await spark.stop()
  }

  @Test
  func check() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName)({
        try await spark.sql(
          "CREATE TABLE \(tableName)(a INT, CONSTRAINT c1 CHECK (a > 0)) USING ORC"
        ).count()
        // This is a syntax-only test, so inserting a value that violates the CHECK constraint is expected to succeed.
        try await spark.sql("INSERT INTO \(tableName) VALUES (-1)").count()
      })
    }
    await spark.stop()
  }
}
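For context on the helper used throughout these tests, here is a minimal sketch of what a withTable-style wrapper could look like, matching the call shape SQLHelper.withTable(spark, tableName)({ ... }) seen above. The behavior (run the body, then drop the tables) is an assumption; the real SQLHelper.withTable in this repository may differ.

enum SQLHelperSketch {
  // Hypothetical approximation of SQLHelper.withTable, not the actual implementation:
  // returns an async wrapper that runs the body and then drops the given tables,
  // on both the success and the failure path.
  static func withTable(
    _ spark: SparkSession, _ tableNames: String...
  ) -> (() async throws -> Void) async throws -> Void {
    return { body in
      do {
        try await body()
      } catch {
        // Best-effort cleanup before rethrowing; Swift's `defer` cannot `await`.
        for name in tableNames {
          _ = try? await spark.sql("DROP TABLE IF EXISTS \(name)").count()
        }
        throw error
      }
      for name in tableNames {
        _ = try? await spark.sql("DROP TABLE IF EXISTS \(name)").count()
      }
    }
  }
}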