Skip to content

Commit d51657a

Browse files
Add PDS member support to region_jcl module
1 parent 9e99f65 commit d51657a

File tree

12 files changed

+1242
-89
lines changed

12 files changed

+1242
-89
lines changed

plugins/module_utils/_data_set_utils.py

Lines changed: 25 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,10 @@
77

88
from __future__ import (absolute_import, division, print_function)
99

10-
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import AnsibleModuleHelper
1110
__metaclass__ = type
1211
import re
13-
12+
import tempfile
13+
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.ansible_module import AnsibleModuleHelper
1414
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution, MVSExecutionException
1515
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.zos_mvs_raw import MVSCmd
1616
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DDStatement, StdoutDefinition, DatasetDefinition, StdinDefinition
@@ -214,6 +214,9 @@ def _run_listds(location): # type: (str) -> tuple[list[_execution], bool, str]
214214
if listds_response.rc == 8 and "NOT IN CATALOG" in listds_response.stdout:
215215
return executions, False, "NONE"
216216

217+
if listds_response.rc == 4 and "MEMBER NAME NOT FOUND" in listds_response.stdout:
218+
return executions, False, "NONE"
219+
217220
# Exists
218221

219222
if listds_response.rc != 0:
@@ -287,3 +290,23 @@ def _read_data_set_content(data_set_name):
287290
"RC {0} when reading content from data set {1}".format(
288291
rc, data_set_name), executions)
289292
return executions, stdout
293+
294+
295+
def _write_jcl_to_data_set(jcl, data_set_name):
296+
"""Writes generated JCL content to the specified data set
297+
"""
298+
executions = []
299+
300+
temp = tempfile.NamedTemporaryFile(delete=True)
301+
with open(temp.name, "w") as f:
302+
f.write(jcl)
303+
rc, stdout, stderr = _execute_command("cp -O u {0} \"//'{1}'\"".format(temp.name, data_set_name))
304+
executions.append(
305+
_execution(
306+
name="Copy JCL contents to data set",
307+
rc=rc,
308+
stdout=stdout,
309+
stderr=stderr))
310+
if rc != 0:
311+
raise MVSExecutionException("Failed to copy JCL content to data set", executions)
312+
return executions

plugins/module_utils/_jcl_helper.py

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,6 @@
66
# FOR INTERNAL USE IN THE COLLECTION ONLY.
77

88
from __future__ import absolute_import, division, print_function
9-
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import _execution, MVSExecutionException
10-
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet, DatasetWriteError
119

1210
__metaclass__ = type
1311
import re
@@ -377,13 +375,3 @@ def _add_single_quotes_to_text(value):
377375
value = value.replace("'", "''")
378376

379377
return "'{0}'".format(value)
380-
381-
@staticmethod
382-
def _write_jcl_to_data_set(jcl, data_set_name):
383-
"""Writes generated JCL content to the specified data set
384-
"""
385-
try:
386-
DataSet.write(data_set_name, jcl)
387-
return [_execution("Copy JCL contents to data set", 0, "", "")]
388-
except DatasetWriteError as e:
389-
raise MVSExecutionException("Failed to copy JCL content to data set", [_execution("Copy JCL contents to data set", 1, "", e.msg)])

plugins/modules/region_jcl.py

Lines changed: 72 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -183,6 +183,7 @@
183183

184184
import string
185185
import math
186+
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import is_member
186187
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set import (
187188
MEGABYTES,
188189
REGION_DATA_SETS,
@@ -193,12 +194,13 @@
193194
WARM,
194195
DataSet
195196
)
196-
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import _read_data_set_content
197+
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import _read_data_set_content, _write_jcl_to_data_set
197198
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.dd_statement import DatasetDefinition
198199
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._jcl_helper import (
199200
JCLHelper, DLM, DD_INSTREAM, CONTENT, END_INSTREAM, JOB_CARD, EXECS, JOB_NAME, DDS, NAME
200201
)
201202
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._response import MVSExecutionException
203+
from ansible_collections.ibm.ibm_zos_cics.plugins.module_utils._data_set_utils import _run_listds
202204

203205

204206
DFHSTART = "dfhstart"
@@ -207,7 +209,7 @@
207209

208210

209211
region_data_sets_list = ['dfhauxt', 'dfhbuxt', 'dfhcsd', 'dfhgcd', 'dfhintra',
210-
'dfhlcd', 'dfhlrq', 'dfhtemp', 'dfhdmpa', 'dfhdmpb', 'dfhstart']
212+
'dfhlcd', 'dfhlrq', 'dfhtemp', 'dfhdmpa', 'dfhdmpb']
211213
APPLID = 'applid'
212214
CEEMSG = 'ceemsg'
213215
CEEOUT = 'ceeout'
@@ -227,7 +229,9 @@
227229
MSGUSR = 'msgusr'
228230
OMIT = 'omit'
229231
OUTPUT_DATA_SETS = 'output_data_sets'
232+
PARTITIONED = 'Partitioned'
230233
PGM = 'pgm'
234+
SEQUENTIAL = 'Sequential'
231235
SIT_PARAMETERS = 'sit_parameters'
232236
SHR = 'SHR'
233237
STEPLIB = 'steplib'
@@ -245,12 +249,24 @@ def __init__(self):
245249
self.jcl = ""
246250
super(AnsibleRegionJCLModule, self).__init__(1, 1)
247251
self.name = self.region_param[DFHSTART][DSN].upper()
248-
self.expected_data_set_organization = "Sequential"
252+
self.base_data_set_name = ""
253+
self.base_exists = False
254+
self.base_data_set_organization = ""
249255
self.dds = []
250256
self.jcl_helper = JCLHelper()
251257
self.primary_unit = ""
252258
self.secondary_unit = ""
253259

260+
def check_member(self):
261+
ds_name_param = self._module.params[REGION_DATA_SETS][DFHSTART][DSN]
262+
return is_member(ds_name_param)
263+
264+
def get_expected_ds_org(self):
265+
if self.member:
266+
return PARTITIONED
267+
else:
268+
return SEQUENTIAL
269+
254270
def get_result(self): # type: () -> dict
255271
result = super().get_result()
256272
result.update({
@@ -270,6 +286,12 @@ def _get_arg_spec(self): # type: () -> dict
270286
def get_arg_defs(self): # type: () -> dict
271287
defs = super().get_arg_defs()
272288
defs.update(self.init_argument_spec())
289+
self.member = self.check_member()
290+
self.expected_data_set_organization = self.get_expected_ds_org()
291+
if self.member:
292+
self.update_arg_def(defs[REGION_DATA_SETS]["options"][DFHSTART]["options"][DSN], "data_set_member")
293+
else:
294+
region_data_sets_list.append("dfhstart")
273295
self.batch_update_arg_defs_for_ds(defs, REGION_DATA_SETS, region_data_sets_list, True)
274296
self.batch_update_arg_defs_for_ds(defs, CICS_DATA_SETS, ["sdfhauth", "sdfhlic", "sdfhload"])
275297
self.batch_update_arg_defs_for_ds(defs, LE_DATA_SETS, ["sceecics", "sceerun", "sceerun2"])
@@ -298,7 +320,7 @@ def update_arg_def(self, dict_to_update, arg_type="data_set_base"):
298320
dict_to_update.pop("type")
299321

300322
def calculate_size_parameters(self):
301-
# Default primary and seconddary units to the space_type module arg
323+
# Default primary and secondary units to the space_type module arg
302324
self.primary_unit = self.unit
303325
self.secondary_unit = self.unit
304326

@@ -317,23 +339,29 @@ def calculate_size_parameters(self):
317339

318340
def create_data_set(self): # type: () -> None
319341
self.calculate_size_parameters()
320-
data_set_def = DatasetDefinition(
321-
dataset_name=self.name,
322-
primary=self.primary,
323-
secondary=self.secondary,
324-
primary_unit=self.primary_unit,
325-
secondary_unit=self.secondary_unit,
326-
volumes=self.volumes,
327-
block_size=4096,
328-
record_length=80,
329-
record_format="FB",
330-
disposition="NEW",
331-
normal_disposition="CATALOG",
332-
conditional_disposition="DELETE",
333-
type="SEQ"
334-
)
335-
336-
super().build_seq_data_set(DFHSTART, data_set_def)
342+
if self.member:
343+
if not self.base_exists:
344+
self._fail("Base data set {0} does not exist. Can only create a member in an existing PDS/E".format(self.base_data_set_name))
345+
if self.base_data_set_organization != PARTITIONED:
346+
self._fail("Base data set {0} is not a PDS/E. Member cannot be created in base data set".format(self.base_data_set_name))
347+
else:
348+
data_set_def = DatasetDefinition(
349+
dataset_name=self.name,
350+
primary=self.primary,
351+
secondary=self.secondary,
352+
primary_unit=self.primary_unit,
353+
secondary_unit=self.secondary_unit,
354+
volumes=self.volumes,
355+
block_size=4096,
356+
record_length=80,
357+
record_format="FB",
358+
disposition="NEW",
359+
normal_disposition="CATALOG",
360+
conditional_disposition="DELETE",
361+
type="SEQ"
362+
)
363+
super().build_seq_data_set(DFHSTART, data_set_def)
364+
337365
self.write_jcl()
338366

339367
def generate_jcl(self):
@@ -343,8 +371,9 @@ def generate_jcl(self):
343371

344372
def write_jcl(self):
345373
try:
346-
jcl_writer_execution = JCLHelper._write_jcl_to_data_set(self.jcl, self.name)
374+
jcl_writer_execution = _write_jcl_to_data_set(self.jcl, self.name)
347375
self.executions.extend(jcl_writer_execution)
376+
self.changed = True
348377
except MVSExecutionException as e:
349378
self.executions.extend(e.executions)
350379
super()._fail(e.message)
@@ -353,18 +382,22 @@ def init_data_set(self):
353382
self.generate_jcl()
354383
if self.exists:
355384
super().delete_data_set()
356-
super().update_data_set_state()
385+
self.update_data_set_state()
357386
self.create_data_set()
358387
else:
359388
self.create_data_set()
360389

361390
def warm_target_state(self):
362-
if self.exists:
391+
if (self.exists and not self.member) or (self.exists and self.base_exists and self.member):
363392
self.generate_jcl()
364393
try:
365394
jcl_writer_execution, jcl_data = _read_data_set_content(self.name)
366395
self.executions.extend(jcl_writer_execution)
367-
if jcl_data.strip() != self.jcl.strip():
396+
gen_jcl = set(self.jcl.split())
397+
existing_jcl = set(jcl_data.split())
398+
399+
jcl_diff = gen_jcl.symmetric_difference(existing_jcl)
400+
if len(jcl_diff) != 0:
368401
super()._fail("Data set {0} does not contain the expected Region JCL.".format(self.name))
369402
except MVSExecutionException as e:
370403
self.executions.extend(e.executions)
@@ -382,6 +415,20 @@ def execute_target_state(self): # type: () -> None
382415
else:
383416
super().invalid_target_state()
384417

418+
def update_data_set_state(self): # type: () -> None
419+
try:
420+
if self.member:
421+
self.base_data_set_name = self.name.split("(")[0]
422+
423+
listds_executions, self.base_exists, self.base_data_set_organization = _run_listds(self.base_data_set_name)
424+
self.executions.extend(listds_executions)
425+
426+
listds_executions, self.exists, self.data_set_organization = _run_listds(self.name)
427+
self.executions.extend(listds_executions)
428+
except MVSExecutionException as e:
429+
self.executions.extend(e.executions)
430+
self._fail(e.message)
431+
385432
def _build_data_structure_of_arguments(self):
386433
self._remove_none_values_from_dict(self._module.params)
387434
self._populate_job_card_dict()

0 commit comments

Comments (0)