@@ -147,8 +147,8 @@ imported JAVASCRIPT libraries.`,
147147 "language" : {
148148 Type : schema .TypeString ,
149149 Optional : true ,
150- ValidateFunc : verify .ValidateEnum ([]string {"SQL" , "JAVASCRIPT" , "" }),
151- Description : `The language of the routine. Possible values: ["SQL", "JAVASCRIPT"]` ,
150+ ValidateFunc : verify .ValidateEnum ([]string {"SQL" , "JAVASCRIPT" , "PYTHON" , "JAVA" , "SCALA" , " " }),
151+ Description : `The language of the routine. Possible values: ["SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA" ]` ,
152152 },
153153 "return_table_type" : {
154154 Type : schema .TypeString ,
@@ -176,6 +176,90 @@ d the order of values or replaced STRUCT field type with RECORD field type, we c
 cannot suppress the recurring diff this causes. As a workaround, we recommend using
 the schema as returned by the API.`,
 			},
+			"spark_options": {
+				Type:        schema.TypeList,
+				Optional:    true,
+				Description: `Optional. If language is one of "PYTHON", "JAVA", "SCALA", this field stores the options for spark stored procedure.`,
+				MaxItems:    1,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"archive_uris": {
+							Type:        schema.TypeList,
+							Computed:    true,
+							Optional:    true,
+							Description: `Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.`,
+							Elem: &schema.Schema{
+								Type: schema.TypeString,
+							},
+						},
+						"connection": {
+							Type:     schema.TypeString,
+							Optional: true,
+							Description: `Fully qualified name of the user-provided Spark connection object.
+Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"`,
+						},
+						"container_image": {
+							Type:        schema.TypeString,
+							Optional:    true,
+							Description: `Custom container image for the runtime environment.`,
+						},
+						"file_uris": {
+							Type:        schema.TypeList,
+							Computed:    true,
+							Optional:    true,
+							Description: `Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.`,
+							Elem: &schema.Schema{
+								Type: schema.TypeString,
+							},
+						},
+						"jar_uris": {
+							Type:        schema.TypeList,
+							Computed:    true,
+							Optional:    true,
+							Description: `JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.`,
+							Elem: &schema.Schema{
+								Type: schema.TypeString,
+							},
+						},
+						"main_class": {
+							Type:     schema.TypeString,
+							Optional: true,
+							Description: `The fully qualified name of a class in jarUris, for example, com.example.wordcount.
+Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type.`,
+						},
+						"main_file_uri": {
+							Type:     schema.TypeString,
+							Optional: true,
+							Description: `The main file/jar URI of the Spark application.
+Exactly one of the definitionBody field and the mainFileUri field must be set for Python.
+Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type.`,
+						},
+						"properties": {
+							Type:     schema.TypeMap,
+							Computed: true,
+							Optional: true,
+							Description: `Configuration properties as a set of key/value pairs, which will be passed on to the Spark application.
+For more information, see Apache Spark and the procedure option list.
+An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.`,
+							Elem: &schema.Schema{Type: schema.TypeString},
+						},
+						"py_file_uris": {
+							Type:        schema.TypeList,
+							Computed:    true,
+							Optional:    true,
+							Description: `Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.`,
+							Elem: &schema.Schema{
+								Type: schema.TypeString,
+							},
+						},
+						"runtime_version": {
+							Type:        schema.TypeString,
+							Optional:    true,
+							Description: `Runtime version. If not specified, the default runtime version is used.`,
+						},
+					},
+				},
+			},
 			"creation_time": {
 				Type:     schema.TypeInt,
 				Computed: true,
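
Note: below is a minimal configuration sketch showing how the new spark_options block is meant to be used, written in the test-config style this provider uses elsewhere. It is not part of this change; the dataset and connection references, bucket URI, and PySpark body are assumptions for illustration only.

// Hypothetical test configuration; identifiers and URIs are invented.
const testAccBigQueryRoutineSparkProcedureExample = `
resource "google_bigquery_routine" "pyspark" {
  dataset_id   = google_bigquery_dataset.test.dataset_id
  routine_id   = "pyspark_routine"
  routine_type = "PROCEDURE"
  language     = "PYTHON"

  definition_body = <<-EOS
    from pyspark.sql import SparkSession
    spark = SparkSession.builder.appName("example").getOrCreate()
    spark.sql("SELECT 1").show()
  EOS

  spark_options {
    connection      = google_bigquery_connection.test.name
    runtime_version = "2.1"
  }
}
`
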
@@ -267,6 +351,12 @@ func resourceBigQueryRoutineCreate(d *schema.ResourceData, meta interface{}) err
 	} else if v, ok := d.GetOkExists("determinism_level"); !tpgresource.IsEmptyValue(reflect.ValueOf(determinismLevelProp)) && (ok || !reflect.DeepEqual(v, determinismLevelProp)) {
 		obj["determinismLevel"] = determinismLevelProp
 	}
+	sparkOptionsProp, err := expandBigQueryRoutineSparkOptions(d.Get("spark_options"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkOptionsProp)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) {
+		obj["sparkOptions"] = sparkOptionsProp
+	}
 
 	url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines")
 	if err != nil {
@@ -400,6 +490,9 @@ func resourceBigQueryRoutineRead(d *schema.ResourceData, meta interface{}) error
 	if err := d.Set("determinism_level", flattenBigQueryRoutineDeterminismLevel(res["determinismLevel"], d, config)); err != nil {
 		return fmt.Errorf("Error reading Routine: %s", err)
 	}
+	if err := d.Set("spark_options", flattenBigQueryRoutineSparkOptions(res["sparkOptions"], d, config)); err != nil {
+		return fmt.Errorf("Error reading Routine: %s", err)
+	}
 
 	return nil
 }
@@ -480,6 +573,12 @@ func resourceBigQueryRoutineUpdate(d *schema.ResourceData, meta interface{}) err
 	} else if v, ok := d.GetOkExists("determinism_level"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, determinismLevelProp)) {
 		obj["determinismLevel"] = determinismLevelProp
 	}
+	sparkOptionsProp, err := expandBigQueryRoutineSparkOptions(d.Get("spark_options"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) {
+		obj["sparkOptions"] = sparkOptionsProp
+	}
 
 	url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}")
 	if err != nil {
@@ -727,6 +826,77 @@ func flattenBigQueryRoutineDeterminismLevel(v interface{}, d *schema.ResourceDat
 	return v
 }
 
+func flattenBigQueryRoutineSparkOptions(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	if v == nil {
+		return nil
+	}
+	original := v.(map[string]interface{})
+	if len(original) == 0 {
+		return nil
+	}
+	transformed := make(map[string]interface{})
+	transformed["connection"] =
+		flattenBigQueryRoutineSparkOptionsConnection(original["connection"], d, config)
+	transformed["runtime_version"] =
+		flattenBigQueryRoutineSparkOptionsRuntimeVersion(original["runtimeVersion"], d, config)
+	transformed["container_image"] =
+		flattenBigQueryRoutineSparkOptionsContainerImage(original["containerImage"], d, config)
+	transformed["properties"] =
+		flattenBigQueryRoutineSparkOptionsProperties(original["properties"], d, config)
+	transformed["main_file_uri"] =
+		flattenBigQueryRoutineSparkOptionsMainFileUri(original["mainFileUri"], d, config)
+	transformed["py_file_uris"] =
+		flattenBigQueryRoutineSparkOptionsPyFileUris(original["pyFileUris"], d, config)
+	transformed["jar_uris"] =
+		flattenBigQueryRoutineSparkOptionsJarUris(original["jarUris"], d, config)
+	transformed["file_uris"] =
+		flattenBigQueryRoutineSparkOptionsFileUris(original["fileUris"], d, config)
+	transformed["archive_uris"] =
+		flattenBigQueryRoutineSparkOptionsArchiveUris(original["archiveUris"], d, config)
+	transformed["main_class"] =
+		flattenBigQueryRoutineSparkOptionsMainClass(original["mainClass"], d, config)
+	return []interface{}{transformed}
+}
+func flattenBigQueryRoutineSparkOptionsConnection(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsRuntimeVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsContainerImage(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsProperties(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsMainFileUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsPyFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsJarUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsArchiveUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
+func flattenBigQueryRoutineSparkOptionsMainClass(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
+	return v
+}
+
 func expandBigQueryRoutineRoutineReference(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
 
 	transformed := make(map[string]interface{})
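
For orientation, a hypothetical test-style sketch of what the flattener above returns: the API's camelCase sparkOptions object comes back as a one-element list keyed by the snake_case schema names, which is the shape d.Set expects for a MaxItems: 1 block. The function name and input values below are invented; it assumes a test file in this package with fmt imported.

// Hypothetical sketch (test file); input values are invented.
func ExampleFlattenSparkOptions() {
	apiObj := map[string]interface{}{
		"connection":     "projects/p/locations/US/connections/c",
		"runtimeVersion": "2.1",
		"mainFileUri":    "gs://my-bucket/main.py",
	}
	// The flattener ignores d and config, so nil is fine for illustration.
	flattened := flattenBigQueryRoutineSparkOptions(apiObj, nil, nil)
	opts := flattened.([]interface{})[0].(map[string]interface{})
	fmt.Println(opts["runtime_version"], opts["main_file_uri"])
	// Output: 2.1 gs://my-bucket/main.py
}
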
@@ -852,3 +1022,132 @@ func expandBigQueryRoutineDescription(v interface{}, d tpgresource.TerraformReso
 func expandBigQueryRoutineDeterminismLevel(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
 	return v, nil
 }
+
+func expandBigQueryRoutineSparkOptions(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	l := v.([]interface{})
+	if len(l) == 0 || l[0] == nil {
+		return nil, nil
+	}
+	raw := l[0]
+	original := raw.(map[string]interface{})
+	transformed := make(map[string]interface{})
+
+	transformedConnection, err := expandBigQueryRoutineSparkOptionsConnection(original["connection"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedConnection); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["connection"] = transformedConnection
+	}
+
+	transformedRuntimeVersion, err := expandBigQueryRoutineSparkOptionsRuntimeVersion(original["runtime_version"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedRuntimeVersion); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["runtimeVersion"] = transformedRuntimeVersion
+	}
+
+	transformedContainerImage, err := expandBigQueryRoutineSparkOptionsContainerImage(original["container_image"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedContainerImage); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["containerImage"] = transformedContainerImage
+	}
+
+	transformedProperties, err := expandBigQueryRoutineSparkOptionsProperties(original["properties"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedProperties); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["properties"] = transformedProperties
+	}
+
+	transformedMainFileUri, err := expandBigQueryRoutineSparkOptionsMainFileUri(original["main_file_uri"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedMainFileUri); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["mainFileUri"] = transformedMainFileUri
+	}
+
+	transformedPyFileUris, err := expandBigQueryRoutineSparkOptionsPyFileUris(original["py_file_uris"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedPyFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["pyFileUris"] = transformedPyFileUris
+	}
+
+	transformedJarUris, err := expandBigQueryRoutineSparkOptionsJarUris(original["jar_uris"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedJarUris); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["jarUris"] = transformedJarUris
+	}
+
+	transformedFileUris, err := expandBigQueryRoutineSparkOptionsFileUris(original["file_uris"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["fileUris"] = transformedFileUris
+	}
+
+	transformedArchiveUris, err := expandBigQueryRoutineSparkOptionsArchiveUris(original["archive_uris"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedArchiveUris); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["archiveUris"] = transformedArchiveUris
+	}
+
+	transformedMainClass, err := expandBigQueryRoutineSparkOptionsMainClass(original["main_class"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedMainClass); val.IsValid() && !tpgresource.IsEmptyValue(val) {
+		transformed["mainClass"] = transformedMainClass
+	}
+
+	return transformed, nil
+}
+
+func expandBigQueryRoutineSparkOptionsConnection(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsRuntimeVersion(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsContainerImage(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsProperties(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) {
+	if v == nil {
+		return map[string]string{}, nil
+	}
+	m := make(map[string]string)
+	for k, val := range v.(map[string]interface{}) {
+		m[k] = val.(string)
+	}
+	return m, nil
+}
+
+func expandBigQueryRoutineSparkOptionsMainFileUri(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsPyFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsJarUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsArchiveUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigQueryRoutineSparkOptionsMainClass(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
+	return v, nil
+}
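
Conversely, a hypothetical sketch of what the expander emits for the API request body: the snake_case config keys become the camelCase field names of the BigQuery routines API, and unset fields are dropped because they expand to empty values. The function name and values below are invented; it assumes a test file in this package with encoding/json and fmt imported.

// Hypothetical sketch (test file); values are invented.
func ExampleExpandSparkOptions() {
	cfg := []interface{}{map[string]interface{}{
		"connection":      "projects/my-project/locations/US/connections/my-conn",
		"runtime_version": "2.1",
		"main_file_uri":   "gs://my-bucket/main.py",
	}}
	// The expander only passes d and config through, so nil is fine for illustration.
	sparkOptions, _ := expandBigQueryRoutineSparkOptions(cfg, nil, nil)
	b, _ := json.Marshal(sparkOptions)
	fmt.Println(string(b))
	// Output: {"connection":"projects/my-project/locations/US/connections/my-conn","mainFileUri":"gs://my-bucket/main.py","runtimeVersion":"2.1"}
}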