@@ -145,6 +145,8 @@ public actor SparkConnectClient {
       throw SparkConnectError.InvalidViewName
     case let m where m.contains("DATA_SOURCE_NOT_FOUND"):
       throw SparkConnectError.DataSourceNotFound
+    case let m where m.contains("DATASET_TYPE_UNSPECIFIED"):
+      throw SparkConnectError.DatasetTypeUnspecified
     default:
       throw error
     }
@@ -1237,6 +1239,39 @@ public actor SparkConnectClient {
     }
   }
 
+  @discardableResult
+  func defineDataset(
+    _ dataflowGraphID: String,
+    _ datasetName: String,
+    _ datasetType: String,
+    _ comment: String? = nil
+  ) async throws -> Bool {
+    try await withGPRC { client in
+      if UUID(uuidString: dataflowGraphID) == nil {
+        throw SparkConnectError.InvalidArgument
+      }
+
+      var defineDataset = Spark_Connect_PipelineCommand.DefineDataset()
+      defineDataset.dataflowGraphID = dataflowGraphID
+      defineDataset.datasetName = datasetName
+      defineDataset.datasetType = datasetType.toDatasetType
+      if let comment {
+        defineDataset.comment = comment
+      }
+
+      var pipelineCommand = Spark_Connect_PipelineCommand()
+      pipelineCommand.commandType = .defineDataset(defineDataset)
+
+      var command = Spark_Connect_Command()
+      command.commandType = .pipelineCommand(pipelineCommand)
+
+      let responses = try await execute(self.sessionID!, command)
+      return responses.contains {
+        $0.responseType == .pipelineCommandResult(Spark_Connect_PipelineCommandResult())
+      }
+    }
+  }
+
   private enum URIParams {
     static let PARAM_GRPC_MAX_MESSAGE_SIZE = "grpc_max_message_size"
     static let PARAM_SESSION_ID = "session_id"
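
A minimal usage sketch for the new `defineDataset` API (not part of the diff above): it assumes an already-connected `SparkConnectClient` instance and a dataflow graph ID obtained from an earlier pipeline command. The dataset name, comment, and the `"table"` type string are illustrative placeholders; the exact strings accepted by the `toDatasetType` conversion are defined elsewhere in the client and are not shown in this diff.

```swift
// Hypothetical caller; none of these names come from the PR itself.
// `client` is an already-connected SparkConnectClient from the same module;
// `graphID` would normally come from the pipeline command that created the graph.
let graphID = UUID().uuidString  // must parse as a UUID, or InvalidArgument is thrown
let defined = try await client.defineDataset(
  graphID,
  "sales_summary",           // datasetName
  "table",                   // datasetType, converted via toDatasetType (assumed value)
  "Aggregated daily sales")  // optional comment
// `defined` is true once the server acknowledges with a PipelineCommandResult.
```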