183
183
184
184
import string
185
185
import math
186
+ from ansible_collections .ibm .ibm_zos_core .plugins .module_utils .data_set import is_member
186
187
from ansible_collections .ibm .ibm_zos_cics .plugins .module_utils ._data_set import (
187
188
MEGABYTES ,
188
189
REGION_DATA_SETS ,
193
194
WARM ,
194
195
DataSet
195
196
)
196
- from ansible_collections .ibm .ibm_zos_cics .plugins .module_utils ._data_set_utils import _read_data_set_content
197
+ from ansible_collections .ibm .ibm_zos_cics .plugins .module_utils ._data_set_utils import _read_data_set_content , _write_jcl_to_data_set
197
198
from ansible_collections .ibm .ibm_zos_core .plugins .module_utils .dd_statement import DatasetDefinition
198
199
from ansible_collections .ibm .ibm_zos_cics .plugins .module_utils ._jcl_helper import (
199
200
JCLHelper , DLM , DD_INSTREAM , CONTENT , END_INSTREAM , JOB_CARD , EXECS , JOB_NAME , DDS , NAME
200
201
)
201
202
from ansible_collections .ibm .ibm_zos_cics .plugins .module_utils ._response import MVSExecutionException
203
+ from ansible_collections .ibm .ibm_zos_cics .plugins .module_utils ._data_set_utils import _run_listds
202
204
203
205
204
206
DFHSTART = "dfhstart"
207
209
208
210
209
211
region_data_sets_list = ['dfhauxt' , 'dfhbuxt' , 'dfhcsd' , 'dfhgcd' , 'dfhintra' ,
210
- 'dfhlcd' , 'dfhlrq' , 'dfhtemp' , 'dfhdmpa' , 'dfhdmpb' , 'dfhstart' ]
212
+ 'dfhlcd' , 'dfhlrq' , 'dfhtemp' , 'dfhdmpa' , 'dfhdmpb' ]
211
213
APPLID = 'applid'
212
214
CEEMSG = 'ceemsg'
213
215
CEEOUT = 'ceeout'
227
229
MSGUSR = 'msgusr'
228
230
OMIT = 'omit'
229
231
OUTPUT_DATA_SETS = 'output_data_sets'
232
+ PARTITIONED = 'Partitioned'
230
233
PGM = 'pgm'
234
+ SEQUENTIAL = 'Sequential'
231
235
SIT_PARAMETERS = 'sit_parameters'
232
236
SHR = 'SHR'
233
237
STEPLIB = 'steplib'
@@ -245,12 +249,24 @@ def __init__(self):
245
249
self .jcl = ""
246
250
super (AnsibleRegionJCLModule , self ).__init__ (1 , 1 )
247
251
self .name = self .region_param [DFHSTART ][DSN ].upper ()
248
- self .expected_data_set_organization = "Sequential"
252
+ self .base_data_set_name = ""
253
+ self .base_exists = False
254
+ self .base_data_set_organization = ""
249
255
self .dds = []
250
256
self .jcl_helper = JCLHelper ()
251
257
self .primary_unit = ""
252
258
self .secondary_unit = ""
253
259
260
+ def check_member (self ):
261
+ ds_name_param = self ._module .params [REGION_DATA_SETS ][DFHSTART ][DSN ]
262
+ return is_member (ds_name_param )
263
+
264
+ def get_expected_ds_org (self ):
265
+ if self .member :
266
+ return PARTITIONED
267
+ else :
268
+ return SEQUENTIAL
269
+
254
270
def get_result (self ): # type: () -> dict
255
271
result = super ().get_result ()
256
272
result .update ({
@@ -270,6 +286,12 @@ def _get_arg_spec(self): # type: () -> dict
270
286
def get_arg_defs (self ): # type: () -> dict
271
287
defs = super ().get_arg_defs ()
272
288
defs .update (self .init_argument_spec ())
289
+ self .member = self .check_member ()
290
+ self .expected_data_set_organization = self .get_expected_ds_org ()
291
+ if self .member :
292
+ self .update_arg_def (defs [REGION_DATA_SETS ]["options" ][DFHSTART ]["options" ][DSN ], "data_set_member" )
293
+ else :
294
+ region_data_sets_list .append ("dfhstart" )
273
295
self .batch_update_arg_defs_for_ds (defs , REGION_DATA_SETS , region_data_sets_list , True )
274
296
self .batch_update_arg_defs_for_ds (defs , CICS_DATA_SETS , ["sdfhauth" , "sdfhlic" , "sdfhload" ])
275
297
self .batch_update_arg_defs_for_ds (defs , LE_DATA_SETS , ["sceecics" , "sceerun" , "sceerun2" ])
@@ -298,7 +320,7 @@ def update_arg_def(self, dict_to_update, arg_type="data_set_base"):
298
320
dict_to_update .pop ("type" )
299
321
300
322
def calculate_size_parameters (self ):
301
- # Default primary and seconddary units to the space_type module arg
323
+ # Default primary and secondary units to the space_type module arg
302
324
self .primary_unit = self .unit
303
325
self .secondary_unit = self .unit
304
326
@@ -317,23 +339,29 @@ def calculate_size_parameters(self):
317
339
318
340
def create_data_set (self ): # type: () -> None
319
341
self .calculate_size_parameters ()
320
- data_set_def = DatasetDefinition (
321
- dataset_name = self .name ,
322
- primary = self .primary ,
323
- secondary = self .secondary ,
324
- primary_unit = self .primary_unit ,
325
- secondary_unit = self .secondary_unit ,
326
- volumes = self .volumes ,
327
- block_size = 4096 ,
328
- record_length = 80 ,
329
- record_format = "FB" ,
330
- disposition = "NEW" ,
331
- normal_disposition = "CATALOG" ,
332
- conditional_disposition = "DELETE" ,
333
- type = "SEQ"
334
- )
335
-
336
- super ().build_seq_data_set (DFHSTART , data_set_def )
342
+ if self .member :
343
+ if not self .base_exists :
344
+ self ._fail ("Base data set {0} does not exist. Can only create a member in an existing PDS/E" .format (self .base_data_set_name ))
345
+ if self .base_data_set_organization != PARTITIONED :
346
+ self ._fail ("Base data set {0} is not a PDS/E. Member cannot be created in base data set" .format (self .base_data_set_name ))
347
+ else :
348
+ data_set_def = DatasetDefinition (
349
+ dataset_name = self .name ,
350
+ primary = self .primary ,
351
+ secondary = self .secondary ,
352
+ primary_unit = self .primary_unit ,
353
+ secondary_unit = self .secondary_unit ,
354
+ volumes = self .volumes ,
355
+ block_size = 4096 ,
356
+ record_length = 80 ,
357
+ record_format = "FB" ,
358
+ disposition = "NEW" ,
359
+ normal_disposition = "CATALOG" ,
360
+ conditional_disposition = "DELETE" ,
361
+ type = "SEQ"
362
+ )
363
+ super ().build_seq_data_set (DFHSTART , data_set_def )
364
+
337
365
self .write_jcl ()
338
366
339
367
def generate_jcl (self ):
@@ -343,8 +371,9 @@ def generate_jcl(self):
343
371
344
372
def write_jcl (self ):
345
373
try :
346
- jcl_writer_execution = JCLHelper . _write_jcl_to_data_set (self .jcl , self .name )
374
+ jcl_writer_execution = _write_jcl_to_data_set (self .jcl , self .name )
347
375
self .executions .extend (jcl_writer_execution )
376
+ self .changed = True
348
377
except MVSExecutionException as e :
349
378
self .executions .extend (e .executions )
350
379
super ()._fail (e .message )
@@ -353,18 +382,22 @@ def init_data_set(self):
353
382
self .generate_jcl ()
354
383
if self .exists :
355
384
super ().delete_data_set ()
356
- super () .update_data_set_state ()
385
+ self .update_data_set_state ()
357
386
self .create_data_set ()
358
387
else :
359
388
self .create_data_set ()
360
389
361
390
def warm_target_state (self ):
362
- if self .exists :
391
+ if ( self .exists and not self . member ) or ( self . exists and self . base_exists and self . member ) :
363
392
self .generate_jcl ()
364
393
try :
365
394
jcl_writer_execution , jcl_data = _read_data_set_content (self .name )
366
395
self .executions .extend (jcl_writer_execution )
367
- if jcl_data .strip () != self .jcl .strip ():
396
+ gen_jcl = set (self .jcl .split ())
397
+ existing_jcl = set (jcl_data .split ())
398
+
399
+ jcl_diff = gen_jcl .symmetric_difference (existing_jcl )
400
+ if len (jcl_diff ) != 0 :
368
401
super ()._fail ("Data set {0} does not contain the expected Region JCL." .format (self .name ))
369
402
except MVSExecutionException as e :
370
403
self .executions .extend (e .executions )
@@ -382,6 +415,20 @@ def execute_target_state(self): # type: () -> None
382
415
else :
383
416
super ().invalid_target_state ()
384
417
418
    def update_data_set_state(self):  # type: () -> None
        """Refresh the recorded existence/organization state via LISTDS.

        For a member target, also queries the base PDS/E (the DSN up to the
        first '(') and records whether it exists and its organization.
        On MVSExecutionException the failing executions are kept and the
        module fails with the exception message.
        """
        try:
            if self.member:
                # Name of the enclosing PDS/E: strip the "(MEMBER)" suffix.
                self.base_data_set_name = self.name.split("(")[0]

                # NOTE(review): base query assumed to run only for member
                # targets — confirm nesting against the original source.
                listds_executions, self.base_exists, self.base_data_set_organization = _run_listds(self.base_data_set_name)
                self.executions.extend(listds_executions)

            listds_executions, self.exists, self.data_set_organization = _run_listds(self.name)
            self.executions.extend(listds_executions)
        except MVSExecutionException as e:
            self.executions.extend(e.executions)
            self._fail(e.message)
+ self ._fail (e .message )
431
+
385
432
def _build_data_structure_of_arguments (self ):
386
433
self ._remove_none_values_from_dict (self ._module .params )
387
434
self ._populate_job_card_dict ()
0 commit comments