Skip to content

Commit ffdb2ea

Browse files
committed
[SPARK-52847] Add ConstraintTests
### What changes were proposed in this pull request?

This PR aims to add `ConstraintTests`.

### Why are the changes needed?

To be ready for SPARK-51207 (SPIP: Constraints in DSv2) of Apache Spark 4.1.
- apache/spark#50496

### Does this PR introduce _any_ user-facing change?

No. This is a test suite addition.

### How was this patch tested?

Pass the CIs.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #215 from dongjoon-hyun/SPARK-52847.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent f5f01cd commit ffdb2ea

File tree

1 file changed

+85
-0
lines changed

1 file changed

+85
-0
lines changed
Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
//
2+
// Licensed to the Apache Software Foundation (ASF) under one
3+
// or more contributor license agreements. See the NOTICE file
4+
// distributed with this work for additional information
5+
// regarding copyright ownership. The ASF licenses this file
6+
// to you under the Apache License, Version 2.0 (the
7+
// "License"); you may not use this file except in compliance
8+
// with the License. You may obtain a copy of the License at
9+
//
10+
// http://www.apache.org/licenses/LICENSE-2.0
11+
//
12+
// Unless required by applicable law or agreed to in writing,
13+
// software distributed under the License is distributed on an
14+
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
// KIND, either express or implied. See the License for the
16+
// specific language governing permissions and limitations
17+
// under the License.
18+
//
19+
20+
import Foundation
21+
import SparkConnect
22+
import Testing
23+
24+
/// A test suite for new syntaxes from SPARK-51207 (SPIP: Constraints in DSv2).
/// For now, only syntax tests are here because Apache Spark 4.1 and the corresponding
/// Apache Iceberg are not released yet. Each test is gated on a `4.1` server version
/// and is a no-op (beyond session start/stop) against older servers.
@Suite(.serialized)
struct ConstraintTests {

  /// Verifies that `PRIMARY KEY` table-constraint syntax is accepted by `CREATE TABLE`
  /// and that subsequent inserts succeed.
  /// NOTE(review): the test only checks the statements run; it does not assert the
  /// constraint is enforced — presumably informational for ORC tables, confirm.
  @Test
  func primary_key() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      // Random suffix avoids collisions across serialized test runs.
      let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName)({
        try await spark.sql("CREATE TABLE \(tableName)(a INT, PRIMARY KEY(a)) USING ORC").count()
        try await spark.sql("INSERT INTO \(tableName) VALUES (1), (2)").count()
      })
    }
    await spark.stop()
  }

  /// Verifies that `FOREIGN KEY ... REFERENCES` table-constraint syntax is accepted.
  @Test
  func foreign_key() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      let tableName1 = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      let tableName2 = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName1, tableName2)({
        // Parent table: owns the referenced `id` column.
        try await spark.sql("CREATE TABLE \(tableName1)(id INT) USING ORC").count()
        // Child table: `fk` references the parent table's `id`.
        // Fixed: the original statement referenced \(tableName2)(id) — a self-reference
        // to the child table, which has no `id` column. The parent is \(tableName1).
        try await spark.sql(
          "CREATE TABLE \(tableName2)(fk INT, FOREIGN KEY(fk) REFERENCES \(tableName1)(id)) USING ORC"
        ).count()
      })
    }
    await spark.stop()
  }

  /// Verifies that a column-level `UNIQUE` constraint is accepted by `CREATE TABLE`
  /// and that subsequent inserts succeed.
  @Test
  func unique() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName)({
        try await spark.sql("CREATE TABLE \(tableName)(a INT UNIQUE) USING ORC").count()
        try await spark.sql("INSERT INTO \(tableName) VALUES (1), (2)").count()
      })
    }
    await spark.stop()
  }

  /// Verifies that a named `CHECK` constraint is accepted by `CREATE TABLE`.
  /// NOTE(review): the insert of `-1` violates `CHECK (a > 0)` yet is expected to
  /// succeed — this appears to rely on the constraint not being enforced for this
  /// table format; confirm against the Spark 4.1 DSv2 constraint semantics.
  @Test
  func check() async throws {
    let spark = try await SparkSession.builder.getOrCreate()
    if await spark.version.starts(with: "4.1") {
      let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
      try await SQLHelper.withTable(spark, tableName)({
        try await spark.sql(
          "CREATE TABLE \(tableName)(a INT, CONSTRAINT c1 CHECK (a > 0)) USING ORC"
        ).count()
        try await spark.sql("INSERT INTO \(tableName) VALUES (-1)").count()
      })
    }
    await spark.stop()
  }
}

0 commit comments

Comments
 (0)