@@ -3979,7 +3979,7 @@ object functions {
       |def udf[$typeTags](f: Function$x[$types]): UserDefinedFunction = {
       |  val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
       |  val inputSchemas = $inputSchemas
-      |  val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+      |  val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
       |  if (nullable) udf else udf.asNonNullable()
       |}""".stripMargin)
   }
@@ -4002,7 +4002,7 @@ object functions {
       | */
       |def udf(f: UDF$i[$extTypeArgs], returnType: DataType): UserDefinedFunction = {
       |  val func = f$anyCast.call($anyParams)
-      |  SparkUserDefinedFunction($funcCall, returnType, inputSchemas = Seq.fill($i)(None))
+      |  SparkUserDefinedFunction.create($funcCall, returnType, inputSchemas = Seq.fill($i)(None))
       |}""".stripMargin)
   }
 
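Note: the two hunks above edit a generator script that lives inside a comment in functions.scala; the hunks that follow are its regenerated output pasted back into the file. A minimal, self-contained sketch of what the Scala template expands to for arity x = 1 (the values are hardcoded here for illustration; the real script loops over arities 0 to 10):

// Hypothetical standalone reduction of the generator comment, arity 1 only.
object GenUdfArity1 {
  def main(args: Array[String]): Unit = {
    val x = 1
    val types = "A1, RT"
    val typeTags = "RT: TypeTag, A1: TypeTag"
    val inputSchemas = "Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Nil"
    // Prints the Function1 overload exactly as it appears in the hunks below.
    println(s"""
      |def udf[$typeTags](f: Function$x[$types]): UserDefinedFunction = {
      |  val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
      |  val inputSchemas = $inputSchemas
      |  val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
      |  if (nullable) udf else udf.asNonNullable()
      |}""".stripMargin)
  }
}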
@@ -4024,7 +4024,7 @@ object functions {
   def udf[RT: TypeTag](f: Function0[RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4040,7 +4040,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag](f: Function1[A1, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4056,7 +4056,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag](f: Function2[A1, A2, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4072,7 +4072,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag](f: Function3[A1, A2, A3, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4088,7 +4088,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag](f: Function4[A1, A2, A3, A4, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A4])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4104,7 +4104,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag](f: Function5[A1, A2, A3, A4, A5, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A4])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A5])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4120,7 +4120,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag](f: Function6[A1, A2, A3, A4, A5, A6, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A4])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A5])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A6])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4136,7 +4136,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag](f: Function7[A1, A2, A3, A4, A5, A6, A7, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A4])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A5])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A6])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A7])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4152,7 +4152,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag](f: Function8[A1, A2, A3, A4, A5, A6, A7, A8, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A4])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A5])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A6])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A7])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A8])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4168,7 +4168,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag](f: Function9[A1, A2, A3, A4, A5, A6, A7, A8, A9, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A4])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A5])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A6])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A7])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A8])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A9])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
@@ -4184,7 +4184,7 @@ object functions {
   def udf[RT: TypeTag, A1: TypeTag, A2: TypeTag, A3: TypeTag, A4: TypeTag, A5: TypeTag, A6: TypeTag, A7: TypeTag, A8: TypeTag, A9: TypeTag, A10: TypeTag](f: Function10[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, RT]): UserDefinedFunction = {
     val ScalaReflection.Schema(dataType, nullable) = ScalaReflection.schemaFor[RT]
     val inputSchemas = Try(ScalaReflection.schemaFor(typeTag[A1])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A2])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A3])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A4])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A5])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A6])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A7])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A8])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A9])).toOption :: Try(ScalaReflection.schemaFor(typeTag[A10])).toOption :: Nil
-    val udf = SparkUserDefinedFunction(f, dataType, inputSchemas)
+    val udf = SparkUserDefinedFunction.create(f, dataType, inputSchemas)
     if (nullable) udf else udf.asNonNullable()
   }
 
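For context, a minimal sketch of how the typed Scala overloads above are exercised from user code (the session setup and column name are illustrative, not part of this change):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.udf

val spark = SparkSession.builder().master("local[*]").getOrCreate()
import spark.implicits._

// Int is a primitive (non-nullable) return type, so per the generated code
// above the reflected schema has nullable = false and udf(...) returns
// udf.asNonNullable().
val plusOne = udf((n: Int) => n + 1)
Seq(1, 2, 3).toDF("n").select(plusOne($"n")).show()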
@@ -4203,7 +4203,7 @@ object functions {
    */
   def udf(f: UDF0[_], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF0[Any]].call()
-    SparkUserDefinedFunction(() => func, returnType, inputSchemas = Seq.fill(0)(None))
+    SparkUserDefinedFunction.create(() => func, returnType, inputSchemas = Seq.fill(0)(None))
   }
 
   /**
@@ -4217,7 +4217,7 @@ object functions {
    */
   def udf(f: UDF1[_, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF1[Any, Any]].call(_: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(1)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(1)(None))
   }
 
   /**
@@ -4231,7 +4231,7 @@ object functions {
    */
   def udf(f: UDF2[_, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF2[Any, Any, Any]].call(_: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(2)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(2)(None))
   }
 
   /**
@@ -4245,7 +4245,7 @@ object functions {
    */
   def udf(f: UDF3[_, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF3[Any, Any, Any, Any]].call(_: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(3)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(3)(None))
   }
 
   /**
@@ -4259,7 +4259,7 @@ object functions {
    */
   def udf(f: UDF4[_, _, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF4[Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(4)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(4)(None))
   }
 
   /**
@@ -4273,7 +4273,7 @@ object functions {
    */
   def udf(f: UDF5[_, _, _, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF5[Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(5)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(5)(None))
   }
 
   /**
@@ -4287,7 +4287,7 @@ object functions {
    */
   def udf(f: UDF6[_, _, _, _, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF6[Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(6)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(6)(None))
   }
 
   /**
@@ -4301,7 +4301,7 @@ object functions {
    */
   def udf(f: UDF7[_, _, _, _, _, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF7[Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(7)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(7)(None))
   }
 
   /**
@@ -4315,7 +4315,7 @@ object functions {
    */
   def udf(f: UDF8[_, _, _, _, _, _, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF8[Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(8)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(8)(None))
   }
 
   /**
@@ -4329,7 +4329,7 @@ object functions {
    */
   def udf(f: UDF9[_, _, _, _, _, _, _, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF9[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(9)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(9)(None))
   }
 
   /**
@@ -4343,7 +4343,7 @@ object functions {
    */
   def udf(f: UDF10[_, _, _, _, _, _, _, _, _, _, _], returnType: DataType): UserDefinedFunction = {
     val func = f.asInstanceOf[UDF10[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]].call(_: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any, _: Any)
-    SparkUserDefinedFunction(func, returnType, inputSchemas = Seq.fill(10)(None))
+    SparkUserDefinedFunction.create(func, returnType, inputSchemas = Seq.fill(10)(None))
   }
 
   // scalastyle:on parameter.number
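The UDF0 through UDF10 overloads above take an explicit returnType because the Java functional interfaces carry no TypeTag to reflect a schema from, which is why inputSchemas is Seq.fill(i)(None). A minimal usage sketch from Scala (names illustrative):

import org.apache.spark.sql.api.java.UDF2
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.types.LongType

// The explicit DataType stands in for the reflection-derived schema used by
// the Scala-closure overloads; no input schemas are known for Java UDFs.
val sumCols = udf(new UDF2[Long, Long, Long] {
  override def call(a: Long, b: Long): Long = a + b
}, LongType)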
@@ -4362,7 +4362,9 @@ object functions {
    * @since 2.0.0
    */
   def udf(f: AnyRef, dataType: DataType): UserDefinedFunction = {
-    SparkUserDefinedFunction(f, dataType, inputSchemas = Nil)
+    // TODO: should call SparkUserDefinedFunction.create() instead but inputSchemas is currently
+    // unavailable. We may need to create type-safe overloaded versions of udf() methods.
+    SparkUserDefinedFunction(f, dataType, inputTypes = None, nullableTypes = None)
   }
 
   /**
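The untyped AnyRef overload above is the one case that cannot go through SparkUserDefinedFunction.create, as the TODO notes: only a bare closure and a DataType are supplied, so no input schemas can be derived. A usage sketch (names illustrative):

import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.types.StringType

// The closure is passed as AnyRef, so no TypeTags are available and the
// resulting UserDefinedFunction carries neither input types nor nullability.
val shout = udf((s: String) => s.toUpperCase, StringType)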