Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
DESCRIBE DATABASE default
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[["Catalog Name","spark_catalog"],["Namespace Name","default"],["Comment","default database"],["Location","file:\/opt\/spark\/work-dir\/spark-warehouse"],["Owner","185"]]
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
DESCRIBE TABLE testcache
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[["col","int",null]]
1 change: 1 addition & 0 deletions Tests/SparkConnectTests/Resources/queries/explain.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
EXPLAIN EXTENDED select k, sum(v) from values (1, 2), (1, 3) t(k, v) group by k
1 change: 1 addition & 0 deletions Tests/SparkConnectTests/Resources/queries/explain.sql.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[["== Parsed Logical Plan ==\n'Aggregate ['k], ['k, unresolvedalias('sum('v))]\n+- SubqueryAlias t\n +- LocalRelation [k#, v#]\n\n== Analyzed Logical Plan ==\nk: int, sum(v): bigint\nAggregate [k#], [k#, sum(v#) AS sum(v)#]\n+- SubqueryAlias t\n +- LocalRelation [k#, v#]\n\n== Optimized Logical Plan ==\nAggregate [k#], [k#, sum(v#) AS sum(v)#]\n+- LocalRelation [k#, v#]\n\n== Physical Plan ==\nAdaptiveSparkPlan isFinalPlan=false\n+- HashAggregate(keys=[k#], functions=[sum(v#)], output=[k#, sum(v)#])\n +- Exchange hashpartitioning(k#, 200), ENSURE_REQUIREMENTS, [plan_id=]\n +- HashAggregate(keys=[k#], functions=[partial_sum(v#)], output=[k#, sum#])\n +- LocalTableScan [k#, v#]\n"]]
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
SHOW DATABASES
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[["default"]]
1 change: 1 addition & 0 deletions Tests/SparkConnectTests/Resources/queries/show_tables.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
SHOW TABLES
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[["","testcache","true"]]
20 changes: 18 additions & 2 deletions Tests/SparkConnectTests/SQLTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,22 @@ struct SQLTests {
let path = Bundle.module.path(forResource: "queries", ofType: "")!
let encoder = JSONEncoder()

// Matches auto-generated expression IDs in Spark plan output, e.g. `#123` or `#456L`
// (see the `k#`/`v#` placeholders in explain.sql.json) — these vary per run.
let regexID = /#\d+L?/
// Matches non-deterministic adaptive-plan identifiers like `plan_id=42` in EXPLAIN output.
let regexPlanId = /plan_id=\d+/

/// Strips run-dependent numeric identifiers (`plan_id=N` and `#N`/`#NL`)
/// from a plan/answer string so golden-file comparisons are stable.
private func removeID(_ str: String) -> String {
    var normalized = str
    normalized.replace(regexPlanId, with: "plan_id=")
    normalized.replace(regexID, with: "#")
    return normalized
}

@Test
func testRemoveID() {
    // Table-driven check: each pair is (input, expected-after-normalization).
    let cases: [(input: String, expected: String)] = [
        ("123", "123"),            // bare number untouched
        ("123L", "123L"),          // bare long literal untouched
        ("#123", "#"),             // expression ID stripped
        ("#123L", "#"),            // long-typed expression ID stripped
        ("plan_id=123", "plan_id="),  // AQE plan id stripped
    ]
    for (input, expected) in cases {
        #expect(removeID(input) == expected)
    }
}

#if !os(Linux)
@Test
func runAll() async throws {
Expand All @@ -38,8 +54,8 @@ struct SQLTests {

let sql = try String(contentsOf: URL(fileURLWithPath: "\(path)/\(name)"), encoding: .utf8)
let jsonData = try encoder.encode(try await spark.sql(sql).collect())
let answer = String(data: jsonData, encoding: .utf8)!
let expected = try String(contentsOf: URL(fileURLWithPath: "\(path)/\(name).json"), encoding: .utf8)
let answer = removeID(String(data: jsonData, encoding: .utf8)!)
let expected = removeID(try String(contentsOf: URL(fileURLWithPath: "\(path)/\(name).json"), encoding: .utf8))
#expect(answer == expected.trimmingCharacters(in: .whitespacesAndNewlines))
}
await spark.stop()
Expand Down
Loading