Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 39 additions & 7 deletions Sources/SparkConnect/DataFrameWriter.swift
Original file line number Diff line number Diff line change
Expand Up @@ -113,16 +113,48 @@ public actor DataFrameWriter: Sendable {
}

/// Saves the content of the ``DataFrame`` at the specified path, if any.
/// - Parameter path: An optional output path to write to.
private func saveInternal(_ path: String?) async throws {
  // Defect fixed: a leftover `var write = WriteOperation()` declaration sat
  // before the closure (pre-refactor residue), shadow-prone and unused.
  try await executeWriteOperation({
    var write = WriteOperation()
    if let path = path {
      write.path = path
    }
    return write
  })
}

/// Saves the content of the ``DataFrame`` as the specified table.
/// - Parameter tableName: A table name.
public func saveAsTable(_ tableName: String) async throws {
  try await executeWriteOperation({
    // Build a table-targeted write with the `saveAsTable` save method.
    var operation = WriteOperation()
    operation.table.tableName = tableName
    operation.table.saveMethod = .saveAsTable
    return operation
  })
}

/// Inserts the content of the ``DataFrame`` to the specified table. It requires that the schema of
/// the ``DataFrame`` is the same as the schema of the table. Unlike ``saveAsTable``,
/// ``insertInto`` ignores the column names and just uses position-based resolution.
/// - Parameter tableName: A table name.
public func insertInto(_ tableName: String) async throws {
  try await executeWriteOperation({
    // Build a table-targeted write with the `insertInto` save method.
    var operation = WriteOperation()
    operation.table.tableName = tableName
    operation.table.saveMethod = .insertInto
    return operation
  })
}

private func executeWriteOperation(_ f: () -> WriteOperation) async throws {
var write = f()

// Cannot both be set
assert(!(!write.path.isEmpty && !write.table.tableName.isEmpty))

let plan = await self.df.getPlan() as! Plan
write.input = plan.root
write.mode = self.saveMode.toSaveMode
if let path = path {
write.path = path
}

// Cannot both be set
// require(!(builder.hasPath && builder.hasTable))

if let source = self.source {
write.source = source
Expand Down
43 changes: 43 additions & 0 deletions Tests/SparkConnectTests/DataFrameWriterTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,49 @@ struct DataFrameWriterTests {
await spark.stop()
}

@Test
func saveAsTable() async throws {
  let session = try await SparkSession.builder.getOrCreate()
  // Unique table name so concurrent test runs cannot collide.
  let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
  try await SQLHelper.withTable(session, tableName)({
    // Creating a brand-new table succeeds.
    try await session.range(1).write.saveAsTable(tableName)
    #expect(try await session.read.table(tableName).count() == 1)

    // Writing again without an explicit mode must fail (table exists).
    try await #require(throws: Error.self) {
      try await session.range(1).write.saveAsTable(tableName)
    }

    // `overwrite` replaces the existing rows.
    try await session.range(1).write.mode("overwrite").saveAsTable(tableName)
    #expect(try await session.read.table(tableName).count() == 1)

    // `append` adds rows on top of the existing ones.
    try await session.range(1).write.mode("append").saveAsTable(tableName)
    #expect(try await session.read.table(tableName).count() == 2)
  })
  await session.stop()
}

@Test
func insertInto() async throws {
  let session = try await SparkSession.builder.getOrCreate()
  // Unique table name so concurrent test runs cannot collide.
  let tableName = "TABLE_" + UUID().uuidString.replacingOccurrences(of: "-", with: "")
  try await SQLHelper.withTable(session, tableName)({
    // Inserting into a table that does not exist yet must fail.
    try await #require(throws: Error.self) {
      try await session.range(1).write.insertInto(tableName)
    }

    // Create the table first.
    try await session.range(1).write.saveAsTable(tableName)
    #expect(try await session.read.table(tableName).count() == 1)

    // Each subsequent insert appends exactly one more row.
    try await session.range(1).write.insertInto(tableName)
    #expect(try await session.read.table(tableName).count() == 2)

    try await session.range(1).write.insertInto(tableName)
    #expect(try await session.read.table(tableName).count() == 3)
  })
  await session.stop()
}

@Test
func partitionBy() async throws {
let tmpDir = "/tmp/" + UUID().uuidString
Expand Down
Loading