diff --git a/Sources/SparkConnect/DataFrame.swift b/Sources/SparkConnect/DataFrame.swift
index 760ece3..e52226a 100644
--- a/Sources/SparkConnect/DataFrame.swift
+++ b/Sources/SparkConnect/DataFrame.swift
@@ -489,13 +489,8 @@ public actor DataFrame: Sendable {
   /// - Parameter schema: The given schema.
   /// - Returns: A ``DataFrame`` with the given schema.
   public func to(_ schema: String) async throws -> DataFrame {
-    // Validate by parsing.
-    do {
-      let dataType = try await sparkSession.client.ddlParse(schema)
-      return DataFrame(spark: self.spark, plan: SparkConnectClient.getToSchema(self.plan.root, dataType))
-    } catch {
-      throw SparkConnectError.InvalidTypeException
-    }
+    let dataType = try await sparkSession.client.ddlParse(schema)
+    return DataFrame(spark: self.spark, plan: SparkConnectClient.getToSchema(self.plan.root, dataType))
   }
 
   /// Returns the content of the Dataset as a Dataset of JSON strings.
diff --git a/Sources/SparkConnect/DataFrameReader.swift b/Sources/SparkConnect/DataFrameReader.swift
index 58567db..274efdf 100644
--- a/Sources/SparkConnect/DataFrameReader.swift
+++ b/Sources/SparkConnect/DataFrameReader.swift
@@ -123,12 +123,7 @@ public actor DataFrameReader: Sendable {
   /// - Returns: A ``DataFrameReader``.
   @discardableResult
   public func schema(_ schema: String) async throws -> DataFrameReader {
-    // Validate by parsing.
-    do {
-      try await sparkSession.client.ddlParse(schema)
-    } catch {
-      throw SparkConnectError.InvalidTypeException
-    }
+    try await sparkSession.client.ddlParse(schema)
     self.userSpecifiedSchemaDDL = schema
     return self
   }
diff --git a/Sources/SparkConnect/DataStreamReader.swift b/Sources/SparkConnect/DataStreamReader.swift
index e90a1c1..da87505 100644
--- a/Sources/SparkConnect/DataStreamReader.swift
+++ b/Sources/SparkConnect/DataStreamReader.swift
@@ -50,12 +50,7 @@ public actor DataStreamReader: Sendable {
   /// - Returns: A ``DataStreamReader``.
   @discardableResult
   public func schema(_ schema: String) async throws -> DataStreamReader {
-    // Validate by parsing.
-    do {
-      try await sparkSession.client.ddlParse(schema)
-    } catch {
-      throw SparkConnectError.InvalidTypeException
-    }
+    try await sparkSession.client.ddlParse(schema)
     self.userSpecifiedSchemaDDL = schema
     return self
   }
diff --git a/Sources/SparkConnect/SparkConnectClient.swift b/Sources/SparkConnect/SparkConnectClient.swift
index 5c6b4cc..f8a0073 100644
--- a/Sources/SparkConnect/SparkConnectClient.swift
+++ b/Sources/SparkConnect/SparkConnectClient.swift
@@ -781,8 +781,17 @@ public actor SparkConnectClient {
           ddlParse.ddlString = ddlString
           return OneOf_Analyze.ddlParse(ddlParse)
         })
-      let response = try await service.analyzePlan(request)
-      return response.ddlParse.parsed
+      do {
+        let response = try await service.analyzePlan(request)
+        return response.ddlParse.parsed
+      } catch let error as RPCError where error.code == .internalError {
+        switch error.message {
+        case let m where m.contains("UNSUPPORTED_DATATYPE") || m.contains("INVALID_IDENTIFIER"):
+          throw SparkConnectError.InvalidTypeException
+        default:
+          throw error
+        }
+      }
     }
   }
 }